nlpsol.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 
26 #include "nlpsol_impl.hpp"
27 #include "external.hpp"
28 #include "casadi/core/timing.hpp"
29 #include "nlp_builder.hpp"
30 #include "nlp_tools.hpp"
31 
32 namespace casadi {
33 
34  bool has_nlpsol(const std::string& name) {
35  return Nlpsol::has_plugin(name);
36  }
37 
38  void load_nlpsol(const std::string& name) {
39  Nlpsol::load_plugin(name);
40  }
41 
42  std::string doc_nlpsol(const std::string& name) {
43  return Nlpsol::getPlugin(name).doc;
44  }
45 
  // Common implementation behind the SX- and MX-dictionary nlpsol()
  // constructors. When the 'detect_simple_bounds' option is set, constraints
  // in g that act linearly on a single variable are split off and forwarded
  // to the solver as internal options, and only the remaining general
  // constraints are kept in the oracle.
  template<class X>
  Function construct_nlpsol(const std::string& name, const std::string& solver,
      const std::map<std::string, X>& nlp, const Dict& opts) {

    if (get_from_dict(opts, "detect_simple_bounds", false)) {
      // Retrieve problem parts, with empty defaults for absent fields
      X x = get_from_dict(nlp, "x", X(0, 1));
      X p = get_from_dict(nlp, "p", X(0, 1));
      X f = get_from_dict(nlp, "f", X(0));
      X g = get_from_dict(nlp, "g", X(0, 1));

      if (g.size1()>0 || g.size2()>0) {
        // Dimension checks
        casadi_assert(g.is_dense() && g.is_vector(),
          "Expected a dense vector 'g', but got " + g.dim(true) + ".");
      }

      // Read dimensions
      casadi_int ng = g.size1();
      casadi_int nx = x.size1();

      // Get constraint Jacobian sparsity, transposed so that each "column"
      // of sp corresponds to one constraint row of g
      Sparsity sp = jacobian_sparsity(g, x).T();

      // Reset result vector
      std::vector<bool> is_simple(ng, true);

      // Check nonlinearity (order-2 dependence of g on x; see which_depends)
      std::vector<bool> is_nonlin = which_depends(g, x, 2, true);

      const casadi_int* row = sp.colind();
      for (casadi_int i=0;i<ng;++i) {
        // Check if each row of jac_g_x only depends on one column
        bool single_dependency = row[i+1]-row[i]==1;
        // Simple = involves exactly one variable and is linear in x
        is_simple[i] = single_dependency && !is_nonlin[i];
      }

      // Full-indices of all simple constraints...
      std::vector<casadi_int> sgi = boolvec_to_index(is_simple);
      // ...and of the remaining general constraints
      std::vector<casadi_int> gi = boolvec_to_index(boolvec_not(is_simple));
      X g_bounds = g(sgi);

      // Detect f2(p)x+f1(p)==0
      // gf maps (x, p) to the slope term (Jacobian-times-ones) and the
      // constraint expression itself
      Function gf = Function("gf", std::vector<X>{x, p},
        std::vector<X>{jtimes(g_bounds, x, X::ones(nx, 1)), g_bounds});
      casadi_assert_dev(!gf.has_free());

      std::vector<casadi_int> target_x;
      // Loop over all constraints
      for (casadi_int i=0;i<ng;++i) {
        // Only treat simple ones
        if (!is_simple[i]) continue;
        // Record the single variable each simple constraint acts on
        target_x.push_back(sp.row()[row[i]]);
      }

      // Forward the detection results to the solver via internal options
      Dict nlpsol_opts = opts;
      nlpsol_opts["detect_simple_bounds_is_simple"] = is_simple;
      nlpsol_opts["detect_simple_bounds_parts"] = gf;
      nlpsol_opts["detect_simple_bounds_target_x"] = target_x;

      if (opts.find("equality")!=opts.end()) {
        // Keep only the 'equality' flags of the constraints that remain general
        std::vector<bool> equality = opts.find("equality")->second;
        nlpsol_opts["equality"] = vector_select(equality, is_simple, true);
      }

      // Build the solver with the reduced constraint set
      std::map<std::string, X> nlpsol_nlp = nlp;
      nlpsol_nlp["g"] = g(gi);
      return nlpsol(name, solver, Nlpsol::create_oracle(nlpsol_nlp, opts), nlpsol_opts);
    } else {
      // No bound detection requested: pass the problem through unchanged
      return nlpsol(name, solver, Nlpsol::create_oracle(nlp, opts), opts);
    }
  }
117 
118  Function nlpsol(const std::string& name, const std::string& solver,
119  const SXDict& nlp, const Dict& opts) {
120  return construct_nlpsol(name, solver, nlp, opts);
121  }
122 
123  Function nlpsol(const std::string& name, const std::string& solver,
124  const MXDict& nlp, const Dict& opts) {
125  return construct_nlpsol(name, solver, nlp, opts);
126  }
127 
  // Assemble the oracle Function "nlp" from a field dictionary: inputs
  // x (decision variables) and p (parameters), outputs f (objective) and
  // g (constraints). Any other field name is an error.
  template<typename XType>
  Function Nlpsol::create_oracle(const std::map<std::string, XType>& d,
      const Dict& opts) {
    std::vector<XType> nl_in(NL_NUM_IN), nl_out(NL_NUM_OUT);
    // Dispatch each dictionary entry to its input/output slot
    for (auto&& i : d) {
      if (i.first=="x") {
        nl_in[NL_X]=i.second;
      } else if (i.first=="p") {
        nl_in[NL_P]=i.second;
      } else if (i.first=="f") {
        nl_out[NL_F]=i.second;
      } else if (i.first=="g") {
        nl_out[NL_G]=i.second;
      } else {
        casadi_error("No such field: " + i.first);
      }
    }
    // Defaults: zero objective, empty (0-by-1) constraint vector
    if (nl_out[NL_F].is_empty()) nl_out[NL_F] = 0;
    if (nl_out[NL_G].is_empty()) nl_out[NL_G] = XType(0, 1);

    // Options for the oracle
    Dict oracle_options;
    Dict::const_iterator it = opts.find("oracle_options");
    if (it != opts.end()) {
      // "oracle_options" has been set: forward it verbatim
      oracle_options = it->second;
    } else {
      // Propagate selected options from Nlpsol to oracle by default
      // (note: 'it' is intentionally reused by the inner lookup)
      for (const char* op : {"verbose", "regularity_check"})
        if ((it = opts.find(op)) != opts.end()) {
          oracle_options[op] = it->second;
        }
    }

    // Create oracle
    return Function("nlp", nl_in, nl_out, NL_INPUTS, NL_OUTPUTS, oracle_options);
  }
165 
166  Function nlpsol(const std::string& name, const std::string& solver,
167  const NlpBuilder& nl, const Dict& opts) {
168  MXDict nlp;
169  nlp["x"] = vertcat(nl.x);
170  nlp["f"] = nl.f;
171  nlp["g"] = vertcat(nl.g);
172  return nlpsol(name, solver, nlp, opts);
173  }
174 
175  Function nlpsol(const std::string& name, const std::string& solver,
176  const std::string& fname, const Dict& opts) {
177  // If fname ends with .c, JIT
178  if (fname.size()>2 && fname.compare(fname.size()-2, fname.size(), ".c")==0) {
179  Importer compiler(fname, "clang");
180  return nlpsol(name, solver, compiler, opts);
181  } else {
182  return nlpsol(name, solver, external("nlp", fname), opts);
183  }
184  }
185 
186  Function nlpsol(const std::string& name, const std::string& solver,
187  const Importer& compiler, const Dict& opts) {
188  return nlpsol(name, solver, external("nlp", compiler), opts);
189  }
190 
191  Function nlpsol(const std::string& name, const std::string& solver,
192  const Function& nlp, const Dict& opts) {
193  // Make sure that nlp is sound
194  if (nlp.has_free()) {
195  casadi_error("Cannot create '" + name + "' since " + str(nlp.get_free()) + " are free.");
196  }
197  return Function::create(Nlpsol::instantiate(name, solver, nlp), opts);
198  }
199 
200  std::vector<std::string> nlpsol_in() {
201  std::vector<std::string> ret(nlpsol_n_in());
202  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_in(i);
203  return ret;
204  }
205 
206  std::vector<std::string> nlpsol_out() {
207  std::vector<std::string> ret(nlpsol_n_out());
208  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_out(i);
209  return ret;
210  }
211 
212  double nlpsol_default_in(casadi_int ind) {
213  switch (ind) {
214  case NLPSOL_LBX:
215  case NLPSOL_LBG:
216  return -std::numeric_limits<double>::infinity();
217  case NLPSOL_UBX:
218  case NLPSOL_UBG:
219  return std::numeric_limits<double>::infinity();
220  default:
221  return 0;
222  }
223  }
224 
225  std::vector<double> nlpsol_default_in() {
226  std::vector<double> ret(nlpsol_n_in());
227  for (size_t i=0; i<ret.size(); ++i) ret[i]=nlpsol_default_in(i);
228  return ret;
229  }
230 
231  std::string nlpsol_in(casadi_int ind) {
232  switch (static_cast<NlpsolInput>(ind)) {
233  case NLPSOL_X0: return "x0";
234  case NLPSOL_P: return "p";
235  case NLPSOL_LBX: return "lbx";
236  case NLPSOL_UBX: return "ubx";
237  case NLPSOL_LBG: return "lbg";
238  case NLPSOL_UBG: return "ubg";
239  case NLPSOL_LAM_X0: return "lam_x0";
240  case NLPSOL_LAM_G0: return "lam_g0";
241  case NLPSOL_NUM_IN: break;
242  }
243  return std::string();
244  }
245 
246  std::string nlpsol_out(casadi_int ind) {
247  switch (static_cast<NlpsolOutput>(ind)) {
248  case NLPSOL_X: return "x";
249  case NLPSOL_F: return "f";
250  case NLPSOL_G: return "g";
251  case NLPSOL_LAM_X: return "lam_x";
252  case NLPSOL_LAM_G: return "lam_g";
253  case NLPSOL_LAM_P: return "lam_p";
254  case NLPSOL_NUM_OUT: break;
255  }
256  return std::string();
257  }
258 
259  casadi_int nlpsol_n_in() {
260  return NLPSOL_NUM_IN;
261  }
262 
263  casadi_int nlpsol_n_out() {
264  return NLPSOL_NUM_OUT;
265  }
266 
  // Constructor: forward the name and oracle to the OracleFunction base
  // and establish default values for the options consumed later in init().
  Nlpsol::Nlpsol(const std::string& name, const Function& oracle)
    : OracleFunction(name, oracle) {

    // Set default options
    callback_step_ = 1;
    eval_errors_fatal_ = false;
    warn_initial_bounds_ = false;
    // NOTE(review): one default assignment appears to be missing from this
    // excerpt at this point — confirm against the full source.
    print_time_ = true;
    calc_multipliers_ = false;
    bound_consistency_ = false;
    min_lam_ = 0;
    calc_lam_x_ = calc_f_ = calc_g_ = false;
    calc_lam_p_ = true;
    no_nlp_grad_ = false;
    error_on_fail_ = false;
    sens_linsol_ = "qr";
  }
285 
287  clear_mem();
288  }
289 
290  bool Nlpsol::is_a(const std::string& type, bool recursive) const {
291  return type=="Nlpsol" || (recursive && OracleFunction::is_a(type, recursive));
292  }
293 
295  switch (static_cast<NlpsolInput>(i)) {
296  case NLPSOL_X0:
297  case NLPSOL_LBX:
298  case NLPSOL_UBX:
299  case NLPSOL_LAM_X0:
300  return get_sparsity_out(NLPSOL_X);
301  case NLPSOL_LBG:
302  case NLPSOL_UBG:
303  case NLPSOL_LAM_G0:
304  return get_sparsity_out(NLPSOL_G);
305  case NLPSOL_P:
306  return oracle_.sparsity_in(NL_P);
307  case NLPSOL_NUM_IN: break;
308  }
309  return Sparsity();
310  }
311 
313  switch (static_cast<NlpsolOutput>(i)) {
314  case NLPSOL_F:
315  return oracle_.sparsity_out(NL_F);
316  case NLPSOL_X:
317  case NLPSOL_LAM_X:
318  return oracle_.sparsity_in(NL_X);
319  case NLPSOL_LAM_G:
320  case NLPSOL_G:
321  if (detect_simple_bounds_is_simple_.empty()) {
322  return oracle_.sparsity_out(NL_G);
323  } else {
325  }
326  case NLPSOL_LAM_P:
327  return get_sparsity_in(NLPSOL_P);
328  case NLPSOL_NUM_OUT: break;
329  }
330  return Sparsity();
331  }
332 
335  {{"iteration_callback",
336  {OT_FUNCTION,
337  "A function that will be called at each iteration with the solver as input. "
338  "Check documentation of Callback."}},
339  {"iteration_callback_step",
340  {OT_INT,
341  "Only call the callback function every few iterations."}},
342  {"iteration_callback_ignore_errors",
343  {OT_BOOL,
344  "If set to true, errors thrown by iteration_callback will be ignored."}},
345  {"ignore_check_vec",
346  {OT_BOOL,
347  "If set to true, the input shape of F will not be checked."}},
348  {"warn_initial_bounds",
349  {OT_BOOL,
350  "Warn if the initial guess does not satisfy LBX and UBX"}},
351  {"eval_errors_fatal",
352  {OT_BOOL,
353  "When errors occur during evaluation of f,g,...,"
354  "stop the iterations"}},
355  {"verbose_init",
356  {OT_BOOL,
357  "Print out timing information about "
358  "the different stages of initialization"}},
359  {"discrete",
360  {OT_BOOLVECTOR,
361  "Indicates which of the variables are discrete, i.e. integer-valued"}},
362  {"equality",
363  {OT_BOOLVECTOR,
364  "Indicate an upfront hint which of the constraints are equalities. "
365  "Some solvers may be able to exploit this knowledge. "
366  "When true, the corresponding lower and upper bounds are assumed equal. "
367  "When false, the corresponding bounds may be equal or different."}},
368  {"calc_multipliers",
369  {OT_BOOL,
370  "Calculate Lagrange multipliers in the Nlpsol base class"}},
371  {"calc_lam_x",
372  {OT_BOOL,
373  "Calculate 'lam_x' in the Nlpsol base class"}},
374  {"calc_lam_p",
375  {OT_BOOL,
376  "Calculate 'lam_p' in the Nlpsol base class"}},
377  {"calc_f",
378  {OT_BOOL,
379  "Calculate 'f' in the Nlpsol base class"}},
380  {"calc_g",
381  {OT_BOOL,
382  "Calculate 'g' in the Nlpsol base class"}},
383  {"no_nlp_grad",
384  {OT_BOOL,
385  "Prevent the creation of the 'nlp_grad' function"}},
386  {"bound_consistency",
387  {OT_BOOL,
388  "Ensure that primal-dual solution is consistent with the bounds"}},
389  {"min_lam",
390  {OT_DOUBLE,
391  "Minimum allowed multiplier value"}},
392  {"oracle_options",
393  {OT_DICT,
394  "Options to be passed to the oracle function"}},
395  {"sens_linsol",
396  {OT_STRING,
397  "Linear solver used for parametric sensitivities (default 'qr')."}},
398  {"sens_linsol_options",
399  {OT_DICT,
400  "Linear solver options used for parametric sensitivities."}},
401  {"detect_simple_bounds",
402  {OT_BOOL,
403  "Automatically detect simple bounds (lbx/ubx) (default false). "
404  "This is hopefully beneficial to speed and robustness but may also have adverse affects: "
405  "1) Subtleties in heuristics and stopping criteria may change the solution, "
406  "2) IPOPT may lie about multipliers of simple equality bounds unless "
407  "'fixed_variable_treatment' is set to 'relax_bounds'."}},
408  {"detect_simple_bounds_is_simple",
409  {OT_BOOLVECTOR,
410  "For internal use only."}},
411  {"detect_simple_bounds_parts",
412  {OT_FUNCTION,
413  "For internal use only."}},
414  {"detect_simple_bounds_target_x",
415  {OT_INTVECTOR,
416  "For internal use only."}}
417  }
418  };
419 
  // Initialize the solver instance: consume Nlpsol-specific options,
  // validate dimensions, set up work-vector sizes, check any iteration
  // callback, and create the 'nlp_grad' oracle function.
  void Nlpsol::init(const Dict& opts) {
    // Read options (bound-detection internals, set by construct_nlpsol)
    for (auto&& op : opts) {
      if (op.first=="detect_simple_bounds_is_simple") {
        assign_vector(op.second.to_bool_vector(), detect_simple_bounds_is_simple_);
        //detect_simple_bounds_is_simple_ = op.second.to_bool_vector();
      } else if (op.first=="detect_simple_bounds_parts") {
        detect_simple_bounds_parts_ = op.second;
      } else if (op.first=="detect_simple_bounds_target_x") {
        detect_simple_bounds_target_x_ = op.second;
      }
    }

    // Collect constraint indices for the detected simple bounds
    for (casadi_int i=0;i<detect_simple_bounds_is_simple_.size();++i) {
      detect_simple_bounds_target_g_.push_back(i);
    }
    // NOTE(review): the opening line matching this brace (likely an if-guard
    // on detect_simple_bounds_is_simple_[i] inside the loop above) is missing
    // from this excerpt — confirm against the full source.
    }

    // Call the initialization method of the base class
    OracleFunction::init(opts);

    // Read options
    for (auto&& op : opts) {
      if (op.first=="iteration_callback") {
        fcallback_ = op.second;
      } else if (op.first=="iteration_callback_step") {
        callback_step_ = op.second;
      } else if (op.first=="eval_errors_fatal") {
        eval_errors_fatal_ = op.second;
      } else if (op.first=="warn_initial_bounds") {
        warn_initial_bounds_ = op.second;
      } else if (op.first=="iteration_callback_ignore_errors") {
        // NOTE(review): the assignment for this option is missing from this
        // excerpt — confirm against the full source.
      } else if (op.first=="discrete") {
        discrete_ = op.second;
      } else if (op.first=="equality") {
        equality_ = op.second;
      } else if (op.first=="calc_multipliers") {
        calc_multipliers_ = op.second;
      } else if (op.first=="calc_lam_x") {
        calc_lam_x_ = op.second;
      } else if (op.first=="calc_lam_p") {
        calc_lam_p_ = op.second;
      } else if (op.first=="calc_f") {
        calc_f_ = op.second;
      } else if (op.first=="calc_g") {
        calc_g_ = op.second;
      } else if (op.first=="no_nlp_grad") {
        no_nlp_grad_ = op.second;
      } else if (op.first=="bound_consistency") {
        bound_consistency_ = op.second;
      } else if (op.first=="min_lam") {
        min_lam_ = op.second;
      } else if (op.first=="sens_linsol") {
        sens_linsol_ = op.second.to_string();
      } else if (op.first=="sens_linsol_options") {
        sens_linsol_options_ = op.second;
      }
    }

    // Deprecated option: 'calc_multipliers' implies both multiplier outputs
    if (calc_multipliers_) {
      calc_lam_x_ = true;
      calc_lam_p_ = true;
    }

    // Get dimensions
    nx_ = nnz_out(NLPSOL_X);
    np_ = nnz_in(NLPSOL_P);
    // NOTE(review): the assignment of ng_ appears to be missing from this
    // excerpt at this point — confirm against the full source.

    // No need to calculate non-existent quantities
    if (np_==0) calc_lam_p_ = false;
    if (ng_==0) calc_g_ = false;

    // Consistency check: 'no_nlp_grad' forbids everything computed from it
    if (no_nlp_grad_) {
      casadi_assert(!calc_lam_p_, "Options 'no_nlp_grad' and 'calc_lam_p' inconsistent");
      casadi_assert(!calc_lam_x_, "Options 'no_nlp_grad' and 'calc_lam_x' inconsistent");
      casadi_assert(!calc_f_, "Options 'no_nlp_grad' and 'calc_f' inconsistent");
      casadi_assert(!calc_g_, "Options 'no_nlp_grad' and 'calc_g' inconsistent");
    }

    // Dimension checks
    casadi_assert(sparsity_out_.at(NLPSOL_G).is_dense()
                  && sparsity_out_.at(NLPSOL_G).is_vector(),
      "Expected a dense vector 'g', but got " + sparsity_out_.at(NLPSOL_G).dim(true) + ".");

    casadi_assert(sparsity_out_.at(NLPSOL_F).is_dense(),
      "Expected a dense 'f', but got " + sparsity_out_.at(NLPSOL_F).dim(true) + ".");

    casadi_assert(sparsity_out_.at(NLPSOL_X).is_dense()
                  && sparsity_out_.at(NLPSOL_X).is_vector(),
      "Expected a dense vector 'x', but got " + sparsity_out_.at(NLPSOL_X).dim(true) + ".");

    // Discrete marker: mixed-integer mode requires solver support
    mi_ = false;
    if (!discrete_.empty()) {
      casadi_assert(discrete_.size()==nx_, "\"discrete\" option has wrong length");
      if (std::find(discrete_.begin(), discrete_.end(), true)!=discrete_.end()) {
        casadi_assert(integer_support(),
          "Discrete variables require a solver with integer support");
        mi_ = true;
      }
    }
    if (!equality_.empty()) {
      casadi_assert(equality_.size()==ng_, "\"equality\" option has wrong length. "
        "Expected " + str(ng_) + " elements, but got " +
        str(equality_.size()) + " instead.");
    }

    // Populate the C problem struct
    set_nlpsol_prob();

    // Allocate memory
    casadi_int sz_arg, sz_res, sz_w, sz_iw;
    casadi_nlpsol_work(&p_nlp_, &sz_arg, &sz_res, &sz_iw, &sz_w);
    alloc_arg(sz_arg, true);
    alloc_res(sz_res, true);
    alloc_iw(sz_iw, true);
    alloc_w(sz_w, true);

    if (!fcallback_.is_null()) {
      // Consistency checks: the callback takes the solver outputs as inputs
      // and returns a scalar (nonzero aborts, by solver convention)
      casadi_assert_dev(!fcallback_.is_null());
      casadi_assert(fcallback_.n_out()==1 && fcallback_.numel_out()==1,
        "Callback function must return a scalar.");
      casadi_assert(fcallback_.n_in()==n_out_,
        "Callback input signature must match the NLP solver output signature");
      for (casadi_int i=0; i<n_out_; ++i) {
        // Ignore empty arguments
        if (fcallback_.sparsity_in(i).is_empty()) continue;
        casadi_assert(fcallback_.size_in(i)==size_out(i),
          "Callback function input size mismatch. For argument '" + nlpsol_out(i) + "', "
          "callback has shape " + fcallback_.sparsity_in(i).dim() + " while NLP has " +
          sparsity_out_.at(i).dim() + ".");
        // TODO(@jaeandersson): Wrap fcallback_ in a function with correct sparsity
        casadi_assert(fcallback_.sparsity_in(i)==sparsity_out_.at(i),
          "Callback function input size mismatch. "
          "For argument " + nlpsol_out(i) + "', callback has shape " +
          fcallback_.sparsity_in(i).dim() + " while NLP has " +
          sparsity_out_.at(i).dim() + ".");
      }

      // Allocate temporary memory
      alloc(fcallback_);
    }

    // Function calculating f, g and the gradient of the Lagrangian w.r.t. x and p
    if (!no_nlp_grad_) {
      create_function("nlp_grad", {"x", "p", "lam:f", "lam:g"},
        {"f", "g", "grad:gamma:x", "grad:gamma:p"},
        {{"gamma", {"f", "g"}}});
    }
  }
575 
576  int detect_bounds_callback(const double** arg, double** res,
577  casadi_int* iw, double* w, void* callback_data) {
578  Function* f = static_cast<Function*>(callback_data);
579  return f->operator()(arg, res, iw, w);
580  }
581 
  // Populate the C problem struct with the problem dimensions (and, in the
  // full source, the bound-detection data).
  void Nlpsol::set_nlpsol_prob() {
    p_nlp_.nx = nx_;
    p_nlp_.ng = ng_;
    p_nlp_.np = np_;

    // NOTE(review): several assignments appear to be missing from this
    // excerpt here — confirm against the full source.

    if (p_nlp_.detect_bounds.ng) {
      // NOTE(review): the body of this branch is missing from this excerpt.
    }
  }
602 
603  int Nlpsol::init_mem(void* mem) const {
604  if (OracleFunction::init_mem(mem)) return 1;
605  auto m = static_cast<NlpsolMemory*>(mem);
606  m->add_stat("callback_fun");
607  m->success = false;
608  m->d_nlp.prob = nullptr;
609  m->unified_return_status = SOLVER_RET_UNKNOWN;
610  return 0;
611  }
612 
  // Validate user-supplied inputs: every lower bound must not exceed its
  // upper bound (and must not be an empty +/-inf range), and the number of
  // equality constraints must not exceed the number of variables.
  void Nlpsol::check_inputs(void* mem) const {
    auto m = static_cast<NlpsolMemory*>(mem);
    auto d_nlp = &m->d_nlp;

    // Skip check?
    if (!inputs_check_) return;

    const double inf = std::numeric_limits<double>::infinity();

    // Number of equality constraints
    casadi_int n_eq = 0;

    // Detect ill-posed problems (simple bounds)
    for (casadi_int i=0; i<nx_; ++i) {
      // Null input buffers fall back to the scheme defaults
      double lb = d_nlp->lbx ? d_nlp->lbx[i] : get_default_in(NLPSOL_LBX);
      double ub = d_nlp->ubx ? d_nlp->ubx[i] : get_default_in(NLPSOL_UBX);
      double x0 = d_nlp->x0 ? d_nlp->x0[i] : get_default_in(NLPSOL_X0);
      casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
        "Ill-posed problem detected: "
        "LBX[" + str(i) + "] <= UBX[" + str(i) + "] was violated. "
        "Got LBX[" + str(i) + "]=" + str(lb) + " and UBX[" + str(i) + "] = " + str(ub) + ".");
      if (warn_initial_bounds_ && (x0>ub || x0<lb)) {
        casadi_warning("Nlpsol: The initial guess does not satisfy LBX and UBX. "
          "Option 'warn_initial_bounds' controls this warning.");
        // NOTE(review): this break leaves the whole loop after the first
        // out-of-bounds guess, so equalities among the remaining variables
        // are not counted toward n_eq — confirm this is intended.
        break;
      }
      if (lb==ub) n_eq++;
    }

    // Detect ill-posed problems (nonlinear bounds)
    for (casadi_int i=0; i<nnz_out(NLPSOL_G); ++i) {
      double lb = d_nlp->lbg ? d_nlp->lbg[i] : get_default_in(NLPSOL_LBG);
      double ub = d_nlp->ubg ? d_nlp->ubg[i] : get_default_in(NLPSOL_UBG);
      casadi_assert(lb <= ub && lb!=inf && ub!=-inf,
        "Ill-posed problem detected: "
        "LBG[" + str(i) + "] <= UBG[" + str(i) + "] was violated. "
        "Got LBG[" + str(i) + "] = " + str(lb) + " and UBG[" + str(i) + "] = " + str(ub) + ".");
      if (lb==ub) n_eq++;
    }

    // Make sure enough degrees of freedom
    using casadi::str; // Workaround, MingGW bug, cf. CasADi issue #890
    if (n_eq> nx_) {
      casadi_warning("NLP is overconstrained: There are " + str(n_eq) +
        " equality constraints but only " + str(nx_) + " variables.");
    }
  }
660 
661  std::map<std::string, Nlpsol::Plugin> Nlpsol::solvers_;
662 
663 #ifdef CASADI_WITH_THREADSAFE_SYMBOLICS
664  std::mutex Nlpsol::mutex_solvers_;
665 #endif // CASADI_WITH_THREADSAFE_SYMBOLICS
666 
667  const std::string Nlpsol::infix_ = "nlpsol";
668 
670  casadi_error("getReducedHessian not defined for class " + class_name());
671  return DM();
672  }
673 
  // Base-class stub: always raises an error; a derived solver class must
  // override this to support reading options from a file.
  void Nlpsol::setOptionsFromFile(const std::string & file) {
    casadi_error("setOptionsFromFile not defined for class " + class_name());
  }
677 
678  void Nlpsol::bound_consistency(casadi_int n, double* z, double* lam,
679  const double* lbz, const double* ubz) {
680  casadi_assert_dev(z!=nullptr);
681  casadi_assert_dev(lam!=nullptr);
682  casadi_assert_dev(lbz!=nullptr);
683  casadi_assert_dev(ubz!=nullptr);
684  // Local variables
685  casadi_int i;
686  // Loop over variables
687  for (i=0; i<n; ++i) {
688  // Make sure bounds are respected
689  z[i] = std::fmin(std::fmax(z[i], lbz[i]), ubz[i]);
690  // Adjust multipliers
691  if (std::isinf(lbz[i]) && std::isinf(ubz[i])) {
692  // Both multipliers are infinite
693  lam[i] = 0.;
694  } else if (std::isinf(lbz[i]) || z[i] - lbz[i] > ubz[i] - z[i]) {
695  // Infinite lower bound or closer to upper bound than lower bound
696  lam[i] = std::fmax(0., lam[i]);
697  } else if (std::isinf(ubz[i]) || z[i] - lbz[i] < ubz[i] - z[i]) {
698  // Infinite upper bound or closer to lower bound than upper bound
699  lam[i] = std::fmin(0., lam[i]);
700  }
701  }
702  }
703 
  // Evaluate the solver: stage the inputs into the work struct, invoke the
  // plugin's solve(), optionally recompute f/g/multipliers via 'nlp_grad',
  // enforce bound consistency, and copy the results to the output buffers.
  // Returns the plugin's solve() flag (nonzero on failure).
  int Nlpsol::eval(const double** arg, double** res, casadi_int* iw, double* w, void* mem) const {
    auto m = static_cast<NlpsolMemory*>(mem);

    auto d_nlp = &m->d_nlp;

    // Reset the solver, prepare for solution
    setup(m, arg, res, iw, w);
    auto p_nlp = d_nlp->prob;

    // Set initial guess
    casadi_copy(d_nlp->x0, nx_, d_nlp->z);

    // Read simple bounds and multiplier guesses
    casadi_copy(d_nlp->lbx, nx_, d_nlp->lbz);
    casadi_copy(d_nlp->ubx, nx_, d_nlp->ubz);
    casadi_copy(d_nlp->lam_x0, nx_, d_nlp->lam);

    if (p_nlp->detect_bounds.ng==0) {
      // Read constraint bounds and multiplier guesses
      casadi_copy(d_nlp->lbg, ng_, d_nlp->lbz+nx_);
      casadi_copy(d_nlp->ubg, ng_, d_nlp->ubz+nx_);
      casadi_copy(d_nlp->lam_g0, ng_, d_nlp->lam+nx_);
    } else {
      // Bound detection active: let the runtime helper set up the bounds
      if (casadi_detect_bounds_before(d_nlp)) return 1;
    }

    // Set multipliers to nan
    casadi_fill(d_nlp->lam_p, np_, nan);

    // Reset f, g
    d_nlp->objective = nan;
    casadi_fill(d_nlp->z + nx_, ng_, nan);

    // Check the provided inputs
    check_inputs(m);

    // Solve the NLP
    int flag = solve(m);

    // Join statistics (introduced for parallel oracle facilities)
    join_results(m);

    // Calculate multiplers
    if ((calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_) && !flag) {
      const double lam_f = 1.;
      // nlp_grad signature: (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
      m->arg[0] = d_nlp->z;
      m->arg[1] = d_nlp->p;
      m->arg[2] = &lam_f;
      m->arg[3] = d_nlp->lam + nx_;
      m->res[0] = calc_f_ ? &d_nlp->objective : nullptr;
      m->res[1] = calc_g_ ? d_nlp->z + nx_ : nullptr;
      m->res[2] = calc_lam_x_ ? d_nlp->lam : nullptr;
      m->res[3] = calc_lam_p_ ? d_nlp->lam_p : nullptr;
      if (calc_function(m, "nlp_grad")) {
        casadi_warning("Failed to calculate multipliers");
      }
      // nlp_grad returns gradients; flip the sign to obtain multipliers
      if (calc_lam_x_) casadi_scal(nx_, -1., d_nlp->lam);
      if (calc_lam_p_) casadi_scal(np_, -1., d_nlp->lam_p);
    }

    // Make sure that an optimal solution is consistant with bounds
    if (bound_consistency_ && !flag) {
      bound_consistency(nx_+ng_, d_nlp->z, d_nlp->lam, d_nlp->lbz, d_nlp->ubz);
    }

    // Get optimal solution
    casadi_copy(d_nlp->z, nx_, d_nlp->x);

    if (p_nlp->detect_bounds.ng==0) {
      casadi_copy(d_nlp->z + nx_, ng_, d_nlp->g);
      casadi_copy(d_nlp->lam, nx_, d_nlp->lam_x);
      casadi_copy(d_nlp->lam + nx_, ng_, d_nlp->lam_g);
    } else {
      // Bound detection active: let the runtime helper restore the outputs
      if (casadi_detect_bounds_after(d_nlp)) return 1;
    }

    // NOTE(review): source and destination are both d_nlp->lam_p here
    // (a self-copy, effectively a no-op) — confirm intended.
    casadi_copy(d_nlp->lam_p, np_, d_nlp->lam_p);
    casadi_copy(&d_nlp->objective, 1, d_nlp->f);

    if (m->success) m->unified_return_status = SOLVER_RET_SUCCESS;

    if (error_on_fail_ && !m->success)
      casadi_error("nlpsol process failed. "
        "Set 'error_on_fail' option to false to ignore this error.");

    if (m->unified_return_status==SOLVER_RET_EXCEPTION) {
      casadi_error("An exception was raised in the solver.");
    }
    return flag;
  }
794 
  // Bind the raw input/output buffers to the NLP work struct, reset the
  // solve status, then advance arg/res past the nlpsol scheme entries so
  // derived classes can consume the remaining work memory.
  void Nlpsol::set_work(void* mem, const double**& arg, double**& res,
      casadi_int*& iw, double*& w) const {
    auto m = static_cast<NlpsolMemory*>(mem);

    // Problem has not been solved at this point
    m->success = false;
    m->unified_return_status = SOLVER_RET_UNKNOWN;

    m->d_nlp.prob = &p_nlp_;
    m->d_nlp.oracle = &m->d_oracle;

    casadi_nlpsol_data<double>& d_nlp = m->d_nlp;
    // Input buffers (may be null; defaults applied downstream)
    d_nlp.p = arg[NLPSOL_P];
    d_nlp.lbx = arg[NLPSOL_LBX];
    d_nlp.ubx = arg[NLPSOL_UBX];
    d_nlp.lbg = arg[NLPSOL_LBG];
    d_nlp.ubg = arg[NLPSOL_UBG];
    d_nlp.x0 = arg[NLPSOL_X0];
    d_nlp.lam_x0 = arg[NLPSOL_LAM_X0];
    d_nlp.lam_g0 = arg[NLPSOL_LAM_G0];

    // Output buffers (may be null if an output is not requested)
    d_nlp.x = res[NLPSOL_X];
    d_nlp.f = res[NLPSOL_F];
    d_nlp.g = res[NLPSOL_G];
    d_nlp.lam_x = res[NLPSOL_LAM_X];
    d_nlp.lam_g = res[NLPSOL_LAM_G];
    d_nlp.lam_p = res[NLPSOL_LAM_P];


    // Skip past the scheme inputs/outputs
    arg += NLPSOL_NUM_IN;
    res += NLPSOL_NUM_OUT;

    // Let the runtime carve up the remaining work memory
    casadi_nlpsol_init(&m->d_nlp, &arg, &res, &iw, &w);
  }
829 
830  std::vector<std::string> nlpsol_options(const std::string& name) {
831  return Nlpsol::plugin_options(name).all();
832  }
833 
834  std::string nlpsol_option_type(const std::string& name, const std::string& op) {
835  return Nlpsol::plugin_options(name).type(op);
836  }
837 
838  std::string nlpsol_option_info(const std::string& name, const std::string& op) {
839  return Nlpsol::plugin_options(name).info(op);
840  }
841 
  // Print a human-readable summary of the optimization problem, followed by
  // the oracle's own (detailed) description.
  void Nlpsol::disp_more(std::ostream& stream) const {
    stream << "minimize f(x;p) subject to lbx<=x<=ubx, lbg<=g(x;p)<=ubg defined by:\n";
    oracle_.disp(stream, true);
  }
846 
848 #ifdef CASADI_WITH_THREADSAFE_SYMBOLICS
849  // Safe access to kkt_
850  std::lock_guard<std::mutex> lock(kkt_mtx_);
851 #endif // CASADI_WITH_THREADSAFE_SYMBOLICS
852  // Quick return if cached
853  SharedObject temp;
854  if (kkt_.shared_if_alive(temp)) {
855  return shared_cast<Function>(temp);
856  }
857 
858  // Generate KKT function
859  Function ret = oracle_.factory("kkt", {"x", "p", "lam:f", "lam:g"},
860  {"jac:g:x", "hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
861 
862  // Cache and return
863  kkt_ = ret;
864  return ret;
865  }
866 
867 
869  get_forward(casadi_int nfwd, const std::string& name,
870  const std::vector<std::string>& inames,
871  const std::vector<std::string>& onames,
872  const Dict& opts) const {
873  casadi_assert(detect_simple_bounds_is_simple_.empty(),
874  "Simple bound detection not compatible with get_forward");
875 
876  // Symbolic expression for the input
877  std::vector<MX> arg = mx_in(), res = mx_out();
878 
879  // Initial guesses not used for derivative calculations
881  std::string name = arg[i].is_symbolic() ? arg[i].name() : "tmp_get_forward";
882  arg[i] = MX::sym(name, Sparsity(arg[i].size()));
883  }
884 
885  // Optimal solution
886  MX x = res[NLPSOL_X];
887  MX lam_g = res[NLPSOL_LAM_G];
888  MX lam_x = res[NLPSOL_LAM_X];
889  MX lam_p = res[NLPSOL_LAM_P];
890  MX f = res[NLPSOL_F];
891  MX g = res[NLPSOL_G];
892 
893  // Inputs used
894  MX lbx = arg[NLPSOL_LBX];
895  MX ubx = arg[NLPSOL_UBX];
896  MX lbg = arg[NLPSOL_LBG];
897  MX ubg = arg[NLPSOL_UBG];
898  MX p = arg[NLPSOL_P];
899 
900  // Get KKT function
901  Function kkt = this->kkt();
902 
903  // Hessian of the Lagrangian, Jacobian of the constraints
904  std::vector<MX> HJ_res = kkt({x, p, 1, lam_g});
905  MX JG = HJ_res.at(0);
906  MX HL = HJ_res.at(1);
907 
908  // Active set (assumed known and given by the multiplier signs)
909  MX ubIx = lam_x > min_lam_;
910  MX lbIx = lam_x < -min_lam_;
911  MX bIx = ubIx + lbIx;
912  MX iIx = 1-bIx;
913  MX ubIg = lam_g > min_lam_;
914  MX lbIg = lam_g < -min_lam_;
915  MX bIg = ubIg + lbIg;
916  MX iIg = 1-bIg;
917 
918  // KKT matrix
919  MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
920  MX H_12 = mtimes(diag(iIx), JG.T());
921  MX H_21 = mtimes(diag(bIg), JG);
922  MX H_22 = diag(-iIg);
923  MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
924 
925  // Sensitivity inputs
926  std::vector<MX> fseed(NLPSOL_NUM_IN);
927  MX fwd_lbx = fseed[NLPSOL_LBX] = MX::sym("fwd_lbx", repmat(x.sparsity(), 1, nfwd));
928  MX fwd_ubx = fseed[NLPSOL_UBX] = MX::sym("fwd_ubx", repmat(x.sparsity(), 1, nfwd));
929  MX fwd_lbg = fseed[NLPSOL_LBG] = MX::sym("fwd_lbg", repmat(g.sparsity(), 1, nfwd));
930  MX fwd_ubg = fseed[NLPSOL_UBG] = MX::sym("fwd_ubg", repmat(g.sparsity(), 1, nfwd));
931  MX fwd_p = fseed[NLPSOL_P] = MX::sym("fwd_p", repmat(p.sparsity(), 1, nfwd));
932 
933  // Guesses are unused
935  fseed[i] = MX(repmat(Sparsity(arg[i].size()), 1, nfwd));
936  }
937 
938  // nlp_grad has the signature
939  // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
940  // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
941  Function nlp_grad = get_function("nlp_grad");
942 
943  // fwd_nlp_grad has the signature
944  // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
945  // fwd_x, fwd_p, fwd_lam_f, fwd_lam_g)
946  // -> (fwd_f, fwd_g, fwd_grad_x, fwd_grad_p)
947  Function fwd_nlp_grad = nlp_grad.forward(nfwd);
948 
949  // Calculate sensitivities from fwd_p
950  std::vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p, 0., fwd_p, 0., 0.};
951  vv = fwd_nlp_grad(vv);
952  MX fwd_g_p = vv.at(1);
953  MX fwd_gL_p = vv.at(2);
954 
955  // Propagate forward seeds
956  MX fwd_alpha_x = (if_else(lbIx, fwd_lbx, 0) + if_else(ubIx, fwd_ubx, 0))
957  - if_else(iIx, fwd_gL_p, 0);
958  MX fwd_alpha_g = (if_else(ubIg, fwd_ubg, 0) + if_else(lbIg, fwd_lbg, 0))
959  - if_else(bIg, fwd_g_p, 0);
960  MX v = MX::vertcat({fwd_alpha_x, fwd_alpha_g});
961 
962  // Solve
964 
965  // Extract sensitivities in x, lam_x and lam_g
966  std::vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
967  MX fwd_x = v_split.at(0);
968  MX fwd_lam_g = v_split.at(1);
969 
970  // Calculate sensitivities in lam_x, lam_g
971  vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
972  fwd_x, fwd_p, 0, fwd_lam_g};
973  vv = fwd_nlp_grad(vv);
974  MX fwd_f = vv.at(0);
975  MX fwd_g = vv.at(1);
976  MX fwd_lam_x = -vv.at(2);
977  MX fwd_lam_p = -vv.at(3);
978 
979  // Forward sensitivities
980  std::vector<MX> fsens(NLPSOL_NUM_OUT);
981  fsens[NLPSOL_X] = fwd_x;
982  fsens[NLPSOL_F] = fwd_f;
983  fsens[NLPSOL_G] = fwd_g;
984  fsens[NLPSOL_LAM_X] = fwd_lam_x;
985  fsens[NLPSOL_LAM_G] = fwd_lam_g;
986  fsens[NLPSOL_LAM_P] = fwd_lam_p;
987 
988  // Gather return values
989  arg.insert(arg.end(), res.begin(), res.end());
990  arg.insert(arg.end(), fseed.begin(), fseed.end());
991  res = fsens;
992 
993  Dict options = opts;
994  options["allow_duplicate_io_names"] = true;
995 
996  return Function(name, arg, res, inames, onames, options);
997  }
998 
// NOTE(review): Doxygen text export of Nlpsol::get_reverse — builds a Function
// computing nadj adjoint (reverse-mode) sensitivities of the NLP solution by
// linearizing the KKT conditions around the optimum. The export dropped a few
// original lines (the "Function Nlpsol::" return-type line, two loop headers
// and the linear-solve line); code below is kept byte-identical — confirm the
// missing lines against upstream nlpsol.cpp before editing.
1000  get_reverse(casadi_int nadj, const std::string& name,
1001  const std::vector<std::string>& inames,
1002  const std::vector<std::string>& onames,
1003  const Dict& opts) const {
// Simple-bound detection rewrites the constraint set, which this KKT-based
// sensitivity propagation does not account for — hence the hard assert.
1004  casadi_assert(detect_simple_bounds_is_simple_.empty(),
1005  "Simple bound detection not compatible with get_reverse");
1006 
1007  // Symbolic expression for the input
1008  std::vector<MX> arg = mx_in(), res = mx_out();
1009 
1010  // Initial guesses not used for derivative calculations
// NOTE(review): the loop header iterating over the guess inputs (X0, LAM_X0,
// LAM_G0) appears to have been dropped by the export — verify upstream.
1012  std::string name = arg[i].is_symbolic() ? arg[i].name() : "tmp_get_reverse";
1013  arg[i] = MX::sym(name, Sparsity(arg[i].size()));
1014  }
1015 
1016  // Optimal solution
1017  MX x = res[NLPSOL_X];
1018  MX lam_g = res[NLPSOL_LAM_G];
1019  MX lam_x = res[NLPSOL_LAM_X];
1020  MX lam_p = res[NLPSOL_LAM_P];
1021  MX f = res[NLPSOL_F];
1022  MX g = res[NLPSOL_G];
1023 
1024  // Inputs used
1025  MX lbx = arg[NLPSOL_LBX];
1026  MX ubx = arg[NLPSOL_UBX];
1027  MX lbg = arg[NLPSOL_LBG];
1028  MX ubg = arg[NLPSOL_UBG];
1029  MX p = arg[NLPSOL_P];
1030 
1031  // Get KKT function
1032  Function kkt = this->kkt();
1033 
1034  // Hessian of the Lagrangian, Jacobian of the constraints
1035  std::vector<MX> HJ_res = kkt({x, p, 1, lam_g});
1036  MX JG = HJ_res.at(0);
1037  MX HL = HJ_res.at(1);
1038 
1039  // Active set (assumed known and given by the multiplier signs)
// Multipliers beyond +/- min_lam_ in magnitude mark the corresponding bound
// as active (ub* / lb* indicators); i* are the complementary inactive masks.
1040  MX ubIx = lam_x > min_lam_;
1041  MX lbIx = lam_x < -min_lam_;
1042  MX bIx = ubIx + lbIx;
1043  MX iIx = 1-bIx;
1044  MX ubIg = lam_g > min_lam_;
1045  MX lbIg = lam_g < -min_lam_;
1046  MX bIg = ubIg + lbIg;
1047  MX iIg = 1-bIg;
1048 
1049  // KKT matrix
// Blocks mask rows/columns by the active-set indicators so that active
// variables/constraints reduce to identity-like rows.
1050  MX H_11 = mtimes(diag(iIx), HL) + diag(bIx);
1051  MX H_12 = mtimes(diag(iIx), JG.T());
1052  MX H_21 = mtimes(diag(bIg), JG);
1053  MX H_22 = diag(-iIg);
1054  MX H = MX::blockcat({{H_11, H_12}, {H_21, H_22}});
1055 
1056  // Sensitivity inputs
1057  std::vector<MX> aseed(NLPSOL_NUM_OUT);
1058  MX adj_x = aseed[NLPSOL_X] = MX::sym("adj_x", repmat(x.sparsity(), 1, nadj));
1059  MX adj_lam_g = aseed[NLPSOL_LAM_G] = MX::sym("adj_lam_g", repmat(g.sparsity(), 1, nadj));
1060  MX adj_lam_x = aseed[NLPSOL_LAM_X] = MX::sym("adj_lam_x", repmat(x.sparsity(), 1, nadj));
1061  MX adj_lam_p = aseed[NLPSOL_LAM_P] = MX::sym("adj_lam_p", repmat(p.sparsity(), 1, nadj));
1062  MX adj_f = aseed[NLPSOL_F] = MX::sym("adj_f", Sparsity::dense(1, nadj));
1063  MX adj_g = aseed[NLPSOL_G] = MX::sym("adj_g", repmat(g.sparsity(), 1, nadj));
1064 
1065  // nlp_grad has the signature
1066  // (x, p, lam_f, lam_g) -> (f, g, grad_x, grad_p)
1067  // with lam_f=1 and lam_g=lam_g, grad_x = -lam_x, grad_p=-lam_p
1068  Function nlp_grad = get_function("nlp_grad");
1069 
1070  // rev_nlp_grad has the signature
1071  // (x, p, lam_f, lam_g, f, g, grad_x, grad_p,
1072  // adj_f, adj_g, adj_grad_x, adj_grad_p)
1073  // -> (adj_x, adj_p, adj_lam_f, adj_lam_g)
1074  Function rev_nlp_grad = nlp_grad.reverse(nadj);
1075 
1076  // Calculate sensitivities from f, g and lam_x
1077  std::vector<MX> vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
1078  adj_f, adj_g, -adj_lam_x, -adj_lam_p};
1079  vv = rev_nlp_grad(vv);
1080  MX adj_x0 = vv.at(0);
1081  MX adj_p0 = vv.at(1);
1082  MX adj_lam_g0 = vv.at(3);
1083 
1084  // Solve to get beta_x_bar, beta_g_bar
1085  MX v = MX::vertcat({adj_x + adj_x0, adj_lam_g + adj_lam_g0});
// NOTE(review): the transposed linear solve against H (using sens_linsol_)
// that overwrites v appears to have been dropped by the export — verify
// upstream before relying on this listing.
1087  std::vector<MX> v_split = vertsplit(v, {0, nx_, nx_+ng_});
1088  MX beta_x_bar = v_split.at(0);
1089  MX beta_g_bar = v_split.at(1);
1090 
1091  // Calculate sensitivities in p
1092  vv = {x, p, 1, lam_g, f, g, -lam_x, -lam_p,
1093  0, bIg*beta_g_bar, iIx*beta_x_bar, 0};
1094  vv = rev_nlp_grad(vv);
1095  MX adj_p = vv.at(1);
1096 
1097  // Reverse sensitivities
// Only the active side of each bound receives a nonzero sensitivity.
1098  std::vector<MX> asens(NLPSOL_NUM_IN);
1099  asens[NLPSOL_UBX] = if_else(ubIx, beta_x_bar, 0);
1100  asens[NLPSOL_LBX] = if_else(lbIx, beta_x_bar, 0);
1101  asens[NLPSOL_UBG] = if_else(ubIg, beta_g_bar, 0);
1102  asens[NLPSOL_LBG] = if_else(lbIg, beta_g_bar, 0);
1103  asens[NLPSOL_P] = adj_p0 - adj_p;
1104 
1105  // Guesses are unused
// NOTE(review): the loop header over the guess inputs appears dropped here
// as well — structurally zero seeds are assigned inside the loop.
1107  asens[i] = MX(repmat(Sparsity(arg[i].size()), 1, nadj));
1108  }
1109 
1110  // Gather return values
// Derivative function inputs are: nominal inputs, nominal outputs, then the
// adjoint seeds; outputs are the adjoint sensitivities.
1111  arg.insert(arg.end(), res.begin(), res.end());
1112  arg.insert(arg.end(), aseed.begin(), aseed.end());
1113  res = asens;
1114 
1115  Dict options = opts;
// Input/output names repeat across nominal and seed groups, so duplicates
// must be explicitly allowed.
1116  options["allow_duplicate_io_names"] = true;
1117 
1118  return Function(name, arg, res, inames, onames, options);
1119  }
1120 
// NOTE(review): body of Nlpsol::callback(NlpsolMemory* m) — invokes the user
// iteration callback with the current iterate. The enclosing signature line
// was dropped by this export. Returns 0 to continue, 1 to abort the solve.
1122  // Quick return if no callback function
1123  if (fcallback_.is_null()) return 0;
1124  // Callback inputs
1125  std::fill_n(m->arg, fcallback_.n_in(), nullptr);
1126 
1127  auto d_nlp = &m->d_nlp;
1128 
// z holds [x; g] and lam holds [lam_x; lam_g], so the g/lam_g views start
// at offset nx_.
1129  m->arg[NLPSOL_X] = d_nlp->z;
1130  m->arg[NLPSOL_F] = &d_nlp->objective;
1131  m->arg[NLPSOL_G] = d_nlp->z + nx_;
1132  m->arg[NLPSOL_LAM_G] = d_nlp->lam + nx_;
1133  m->arg[NLPSOL_LAM_X] = d_nlp->lam;
1134 
1135  // Callback outputs
1136  std::fill_n(m->res, fcallback_.n_out(), nullptr);
1137  double ret = 0;
1138  m->res[0] = &ret;
1139 
1140  // Start timer
1141  m->fstats.at("callback_fun").tic();
1142  try {
1143  // Evaluate
1144  fcallback_(m->arg, m->res, m->iw, m->w, 0);
1145  } catch(KeyboardInterruptException& ex) {
1146  (void)ex; // unused
1147  throw;
1148  } catch(std::exception& ex) {
1149  print("WARNING: intermediate_callback error: %s\n", ex.what());
// NOTE(review): one line was dropped by the export here — presumably the
// rethrow-unless-iteration_callback_ignore_errors_ branch; verify upstream.
1151  }
1152 
// User interruption? A nonzero value returned by the callback aborts.
// NOTE(review): this early return skips the toc() below, leaving the
// "callback_fun" timer unbalanced — confirm upstream whether intended.
1154  if (static_cast<casadi_int>(ret)) return 1;
1155 
1156  // Stop timer
1157  m->fstats.at("callback_fun").toc();
1158 
1159  return 0;
1160  }
1161 
// Collect solver statistics: base-class (oracle) stats plus the success flag,
// the unified return status, and — when simple-bound detection is active —
// which constraints were reformulated as variable bounds.
1162  Dict Nlpsol::get_stats(void* mem) const {
1163  Dict stats = OracleFunction::get_stats(mem);
1164  auto m = static_cast<NlpsolMemory*>(mem);
// d_nlp.prob is only set once the solver has been evaluated numerically.
1165  casadi_assert(m->d_nlp.prob,
1166  "No stats available: nlp Solver instance has not yet been called with numerical arguments.");
1167  auto d_nlp = &m->d_nlp;
1168  stats["success"] = m->success;
1169  stats["unified_return_status"] = string_from_UnifiedReturnStatus(m->unified_return_status);
1170  if (d_nlp->prob && d_nlp->prob->detect_bounds.ng) {
1171  std::vector<bool> is_simple;
// NOTE(review): the line populating is_simple (copying from
// detect_simple_bounds_is_simple_) appears dropped by this export — as shown
// the vector would be stored empty; verify against upstream nlpsol.cpp.
1173  stats["detect_simple_bounds_is_simple"] = is_simple;
1174  stats["detect_simple_bounds_target_x"] = detect_simple_bounds_target_x_;
1175  }
1176  return stats;
1177  }
1178 
// NOTE(review): body of Nlpsol::codegen_body_enter(CodeGenerator& g) — emits
// the C code that runs at the start of a generated solver: wiring the nlpsol
// data/problem structs to arg/res, configuring simple-bound detection, and
// copying inputs into the work vectors. The signature line was dropped by
// this export.
1181  g.local("d_nlp", "struct casadi_nlpsol_data");
1182  g.local("p_nlp", "struct casadi_nlpsol_prob");
1183 
1184  g << "d_nlp.oracle = &d_oracle;\n";
1185 
// Map solver inputs (parameters, bounds, guesses) from the arg array.
1186  g << "d_nlp.p = arg[" << NLPSOL_P << "];\n";
1187  g << "d_nlp.lbx = arg[" << NLPSOL_LBX << "];\n";
1188  g << "d_nlp.ubx = arg[" << NLPSOL_UBX << "];\n";
1189  g << "d_nlp.lbg = arg[" << NLPSOL_LBG << "];\n";
1190  g << "d_nlp.ubg = arg[" << NLPSOL_UBG << "];\n";
1191  g << "d_nlp.x0 = arg[" << NLPSOL_X0 << "];\n";
1192  g << "d_nlp.lam_x0 = arg[" << NLPSOL_LAM_X0 << "];\n";
1193  g << "d_nlp.lam_g0 = arg[" << NLPSOL_LAM_G0 << "];\n";
1194 
// Map solver outputs to the res array.
1195  g << "d_nlp.x = res[" << NLPSOL_X << "];\n";
1196  g << "d_nlp.f = res[" << NLPSOL_F << "];\n";
1197  g << "d_nlp.g = res[" << NLPSOL_G << "];\n";
1198  g << "d_nlp.lam_x = res[" << NLPSOL_LAM_X << "];\n";
1199  g << "d_nlp.lam_g = res[" << NLPSOL_LAM_G << "];\n";
1200  g << "d_nlp.lam_p = res[" << NLPSOL_LAM_P << "];\n";
1201 
// Problem dimensions are baked into the generated code as constants.
1202  g << "d_nlp.prob = &p_nlp;\n";
1203  g << "p_nlp.nx = " << nx_ << ";\n";
1204  g << "p_nlp.ng = " << ng_ << ";\n";
1205  g << "p_nlp.np = " << np_ << ";\n";
1206  g << "p_nlp.detect_bounds.ng = " << detect_simple_bounds_is_simple_.size() << ";\n";
1207  if (detect_simple_bounds_is_simple_.size()) {
1208 
1209 
1210  g << "p_nlp.detect_bounds.sz_arg = " << detect_simple_bounds_parts_.sz_arg() << ";\n";
1211  g << "p_nlp.detect_bounds.sz_res = " << detect_simple_bounds_parts_.sz_res() << ";\n";
1212  g << "p_nlp.detect_bounds.sz_iw = " << detect_simple_bounds_parts_.sz_iw() << ";\n";
1213  g << "p_nlp.detect_bounds.sz_w = " << detect_simple_bounds_parts_.sz_w() << ";\n";
1214 
1215  g << "p_nlp.detect_bounds.nb = " << detect_simple_bounds_target_x_.size() << ";\n";
// NOTE(review): the continuation lines of the next three statements (each
// presumably "<< g.constant(...) << \";\\n\";") were dropped by this export —
// as shown these statements are incomplete; verify upstream.
1216  g << "p_nlp.detect_bounds.target_x = "
1218  g << "p_nlp.detect_bounds.target_g = "
1220  g << "p_nlp.detect_bounds.is_simple = "
1222  std::string w =
1223  g.shorthand(g.wrapper(detect_simple_bounds_parts_, "detect_simple_bounds_wrapper"));
1224  g << "p_nlp.detect_bounds.callback = " << w << ";\n";
1225  g << "p_nlp.detect_bounds.callback_data = 0;\n";
1226  }
1227  g << "casadi_nlpsol_init(&d_nlp, &arg, &res, &iw, &w);\n";
1228 
1229  // Set initial guess
1230  g.copy_default("d_nlp.x0", nx_, "d_nlp.z", "0", false);
1231 
1232  // Read simple bounds and multiplier guesses
1233  g.copy_default("d_nlp.lbx", nx_, "d_nlp.lbz", "-casadi_inf", false);
1234  g.copy_default("d_nlp.ubx", nx_, "d_nlp.ubz", "casadi_inf", false);
1235  g.copy_default("d_nlp.lam_x0", nx_, "d_nlp.lam", "0", false);
1236 
1237  if (detect_simple_bounds_is_simple_.empty()) {
1238  // Read constraint bounds and multiplier guesses
1239  g.copy_default("d_nlp.lbg", ng_, "d_nlp.lbz+"+str(nx_),
1240  "-casadi_inf", false);
1241  g.copy_default("d_nlp.ubg", ng_, "d_nlp.ubz+"+str(nx_),
1242  "casadi_inf", false);
1243  g.copy_default("d_nlp.lam_g0", ng_, "d_nlp.lam+"+str(nx_), "0", false);
1244  } else {
// With detection enabled, constraint bounds are folded into variable bounds
// by the runtime helper instead of being copied verbatim.
1245  g << "if (casadi_detect_bounds_before(&d_nlp)) return 1;\n";
1246  }
1247 
1248  }
1249 
// NOTE(review): body of Nlpsol::codegen_declarations(CodeGenerator& g) — the
// signature line was dropped by this export. Registers the nlp_grad oracle
// function as a codegen dependency when any post-solve quantity must be
// recomputed, and emits a C wrapper for the simple-bound-detection callback.
1252  if (calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_)
1253  g.add_dependency(get_function("nlp_grad"));
1254 
1255  if (detect_simple_bounds_is_simple_.size()) {
1257  std::string w =
1258  g.shorthand(g.wrapper(detect_simple_bounds_parts_, "detect_simple_bounds_wrapper"));
1259 
// Emit a small adapter matching the detect_bounds.callback signature that
// forwards to the generated detect_simple_bounds_parts_ function.
1260  g << "int " << w
1261  << "(const casadi_real** arg, casadi_real** res, "
1262  << "casadi_int* iw, casadi_real* w, void* callback_data) {\n";
1263  std::string flag = g(detect_simple_bounds_parts_, "arg", "res", "iw", "w");
1264  g << "return " + flag + ";\n";
1265  g << "}\n";
1266  }
1267  }
1268 
// NOTE(review): body of Nlpsol::codegen_body_exit(CodeGenerator& g) — the
// signature line was dropped by this export. Emits the C code that runs after
// the solve: optionally recomputing f, g and the multipliers via nlp_grad,
// enforcing bound consistency, and copying the work vectors to the outputs.
1270  if (calc_f_ || calc_g_ || calc_lam_x_ || calc_lam_p_) {
1271  g.local("one", "const casadi_real");
1272  g.init_local("one", "1");
// nlp_grad signature: (x, p, lam_f=1, lam_g) -> (f, g, grad_x, grad_p).
1273  g << "d->arg[0] = d_nlp.z;\n";
1274  g << "d->arg[1] = d_nlp.p;\n";
1275  g << "d->arg[2] = &one;\n";
1276  g << "d->arg[3] = d_nlp.lam+" + str(nx_) + ";\n";
1277  g << "d->res[0] = " << (calc_f_ ? "&d_nlp.objective" : "0") << ";\n";
1278  g << "d->res[1] = " << (calc_g_ ? "d_nlp.z+" + str(nx_) : "0") << ";\n";
1279  g << "d->res[2] = " << (calc_lam_x_ ? "d_nlp.lam+" + str(nx_) : "0") << ";\n";
1280  g << "d->res[3] = " << (calc_lam_p_ ? "d_nlp.lam_p" : "0") << ";\n";
1281  std::string nlp_grad = g(get_function("nlp_grad"), "d->arg", "d->res", "d->iw", "d->w");
1282  g << "if (" + nlp_grad + ") return 1;\n";
// nlp_grad yields gradients; negate to obtain the multiplier sign convention.
1283  if (calc_lam_x_) g << g.scal(nx_, "-1.0", "d_nlp.lam") << "\n";
1284  if (calc_lam_p_) g << g.scal(np_, "-1.0", "d_nlp.lam_p") << "\n";
1285  }
1286  if (bound_consistency_) {
1287  g << g.bound_consistency(nx_+ng_, "d_nlp.z", "d_nlp.lam", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
1288  }
1289 
// Copy the primal solution; z is laid out as [x; g], lam as [lam_x; lam_g].
1290  g << g.copy("d_nlp.z", nx_, "d_nlp.x") << "\n";
1291 
1292  if (detect_simple_bounds_is_simple_.empty()) {
1293  g << g.copy("d_nlp.z + " + str(nx_), ng_, "d_nlp.g") << "\n";
1294  g << g.copy("d_nlp.lam", nx_, "d_nlp.lam_x") << "\n";
1295  g << g.copy("d_nlp.lam + " + str(nx_), ng_, "d_nlp.lam_g") << "\n";
1296  } else {
// With detection enabled, the helper maps bound multipliers back onto the
// original constraint set.
1297  g << "if (casadi_detect_bounds_after(&d_nlp)) return 1;\n";
1298  }
1299 
1300  g.copy_check("&d_nlp.objective", 1, "d_nlp.f", false, true);
1301  g.copy_check("d_nlp.lam_p", np_, "d_nlp.lam_p", false, true);
1302 
1304  }
1305 
// NOTE(review): body of Nlpsol::serialize_body(SerializingStream& s) — the
// signature lines were dropped by this export. Packs all Nlpsol members at
// serialization format version 5; keep field order in sync with the
// deserializing constructor below.
1308 
1309  s.version("Nlpsol", 5);
1310  s.pack("Nlpsol::nx", nx_);
1311  s.pack("Nlpsol::ng", ng_);
1312  s.pack("Nlpsol::np", np_);
1313  s.pack("Nlpsol::fcallback", fcallback_);
1314  s.pack("Nlpsol::callback_step", callback_step_);
1315  s.pack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
1316  s.pack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
1317  s.pack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
1318  s.pack("Nlpsol::calc_multipliers", calc_multipliers_);
1319  s.pack("Nlpsol::calc_lam_x", calc_lam_x_);
1320  s.pack("Nlpsol::calc_lam_p", calc_lam_p_);
1321  s.pack("Nlpsol::calc_f", calc_f_);
1322  s.pack("Nlpsol::calc_g", calc_g_);
1323  s.pack("Nlpsol::min_lam", min_lam_);
1324  s.pack("Nlpsol::bound_consistency", bound_consistency_);
1325  s.pack("Nlpsol::no_nlp_grad", no_nlp_grad_);
1326  s.pack("Nlpsol::discrete", discrete_);
1327  s.pack("Nlpsol::equality", equality_);
1328  s.pack("Nlpsol::mi", mi_);
1329  s.pack("Nlpsol::sens_linsol", sens_linsol_);
1330  s.pack("Nlpsol::sens_linsol_options", sens_linsol_options_);
1331  s.pack("Nlpsol::detect_simple_bounds_is_simple", detect_simple_bounds_is_simple_);
1332  s.pack("Nlpsol::detect_simple_bounds_parts", detect_simple_bounds_parts_);
1333  s.pack("Nlpsol::detect_simple_bounds_target_x", detect_simple_bounds_target_x_);
1334  }
1335 
1339  }
1340 
1343  }
1344 
// NOTE(review): body of the deserializing constructor Nlpsol(DeserializingStream&)
// — the signature line was dropped by this export. Unpacks members saved by
// serialize_body above, migrating across format versions 1..5, then rebuilds
// the runtime problem struct via set_nlpsol_prob().
1346  int version = s.version("Nlpsol", 1, 5);
1347  s.unpack("Nlpsol::nx", nx_);
1348  s.unpack("Nlpsol::ng", ng_);
1349  s.unpack("Nlpsol::np", np_);
1350  s.unpack("Nlpsol::fcallback", fcallback_);
1351  s.unpack("Nlpsol::callback_step", callback_step_);
// error_on_fail was stored here only in formats <= 2.
1352  if (version<=2) {
1353  s.unpack("Nlpsol::error_on_fail", error_on_fail_);
1354  }
1355  s.unpack("Nlpsol::eval_errors_fatal", eval_errors_fatal_);
1356  s.unpack("Nlpsol::warn_initial_bounds", warn_initial_bounds_);
1357  s.unpack("Nlpsol::iteration_callback_ignore_errors", iteration_callback_ignore_errors_);
1358  s.unpack("Nlpsol::calc_multipliers", calc_multipliers_);
1359  s.unpack("Nlpsol::calc_lam_x", calc_lam_x_);
1360  s.unpack("Nlpsol::calc_lam_p", calc_lam_p_);
1361  s.unpack("Nlpsol::calc_f", calc_f_);
1362  s.unpack("Nlpsol::calc_g", calc_g_);
1363  s.unpack("Nlpsol::min_lam", min_lam_);
1364  s.unpack("Nlpsol::bound_consistency", bound_consistency_);
1365  s.unpack("Nlpsol::no_nlp_grad", no_nlp_grad_);
1366  s.unpack("Nlpsol::discrete", discrete_);
// equality_ was introduced in format 4.
1367  if (version>=4) {
1368  s.unpack("Nlpsol::equality", equality_);
1369  }
1370  s.unpack("Nlpsol::mi", mi_);
// Sensitivity linear solver fields exist from format 2; default to "qr".
1371  if (version>=2) {
1372  s.unpack("Nlpsol::sens_linsol", sens_linsol_);
1373  s.unpack("Nlpsol::sens_linsol_options", sens_linsol_options_);
1374  } else {
1375  sens_linsol_ = "qr";
1376  }
1377 
// Simple-bound detection data exists from format 3; format 4 saved an
// incompatible detect_simple_bounds_parts signature and is rejected.
1378  if (version>=3) {
1379  s.unpack("Nlpsol::detect_simple_bounds_is_simple", detect_simple_bounds_is_simple_);
1380  s.unpack("Nlpsol::detect_simple_bounds_parts", detect_simple_bounds_parts_);
1381  if (version==4) {
1382  casadi_error("Saved detect_simple_bounds_parts changed signature");
1383  }
1384  s.unpack("Nlpsol::detect_simple_bounds_target_x", detect_simple_bounds_target_x_);
1385  }
// Rebuild target_g from the is_simple flags.
1386  for (casadi_int i=0;i<detect_simple_bounds_is_simple_.size();++i) {
// NOTE(review): a guard line (presumably testing detect_simple_bounds_is_simple_[i])
// appears to have been dropped by this export — verify upstream.
1388  detect_simple_bounds_target_g_.push_back(i);
1389  }
1390  }
1391  set_nlpsol_prob();
1392  }
1393 
1394 } // namespace casadi
const char * what() const override
Display error.
Definition: exception.hpp:90
Helper class for C code generation.
std::string add_dependency(const Function &f)
Add a function dependency.
std::string wrapper(const Function &base, const std::string &name)
std::string copy(const std::string &arg, std::size_t n, const std::string &res)
Create a copy operation.
std::string constant(const std::vector< casadi_int > &v)
Represent an array constant, adding it to the generated code if it is not already present.
std::string scal(casadi_int n, const std::string &alpha, const std::string &x)
Generate code scaling a vector by a scalar: x = alpha*x.
std::string bound_consistency(casadi_int n, const std::string &x, const std::string &lam, const std::string &lbx, const std::string &ubx)
bound_consistency
void local(const std::string &name, const std::string &type, const std::string &ref="")
Declare a local variable.
void init_local(const std::string &name, const std::string &def)
Specify the default value for a local variable.
std::string shorthand(const std::string &name) const
Get a shorthand.
void copy_check(const std::string &arg, std::size_t n, const std::string &res, bool check_lhs=true, bool check_rhs=true)
void copy_default(const std::string &arg, std::size_t n, const std::string &res, const std::string &def, bool check_rhs=true)
void add_auxiliary(Auxiliary f, const std::vector< std::string > &inst={"casadi_real"})
Add a built-in auxiliary function.
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_iw(size_t sz_iw, bool persistent=false)
Ensure required length of iw field.
void alloc_res(size_t sz_res, bool persistent=false)
Ensure required length of res field.
virtual const std::vector< MX > mx_in() const
Get function input(s) and output(s)
void alloc_arg(size_t sz_arg, bool persistent=false)
Ensure required length of arg field.
virtual bool is_a(const std::string &type, bool recursive) const
Check if the function is of a particular type.
bool inputs_check_
Errors are thrown if numerical values of inputs look bad.
size_t sz_res() const
Get required length of res field.
std::pair< casadi_int, casadi_int > size_out(casadi_int ind) const
Input/output dimensions.
casadi_int nnz_in() const
Number of input/output nonzeros.
std::vector< Sparsity > sparsity_out_
void serialize_type(SerializingStream &s) const override
Serialize type information.
size_t sz_w() const
Get required length of w field.
virtual const std::vector< MX > mx_out() const
Get function input(s) and output(s)
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
casadi_int nnz_out() const
Number of input/output nonzeros.
size_t sz_arg() const
Get required length of arg field.
void setup(void *mem, const double **arg, double **res, casadi_int *iw, double *w) const
Set the (persistent and temporary) work vectors.
void alloc(const Function &f, bool persistent=false, int num_threads=1)
Ensure work vectors long enough to evaluate function.
size_t sz_iw() const
Get required length of iw field.
static std::string string_from_UnifiedReturnStatus(UnifiedReturnStatus status)
Function object.
Definition: function.hpp:60
Function forward(casadi_int nfwd) const
Get a function that calculates nfwd forward derivatives.
Definition: function.cpp:1135
size_t sz_res() const
Get required length of res field.
Definition: function.cpp:1085
const Sparsity & sparsity_out(casadi_int ind) const
Get sparsity of a given output.
Definition: function.cpp:1031
casadi_int numel_out() const
Get number of output elements.
Definition: function.cpp:863
Function reverse(casadi_int nadj) const
Get a function that calculates nadj adjoint derivatives.
Definition: function.cpp:1143
static Function create(FunctionInternal *node)
Create from node.
Definition: function.cpp:336
const Sparsity & sparsity_in(casadi_int ind) const
Get sparsity of a given input.
Definition: function.cpp:1015
size_t sz_iw() const
Get required length of iw field.
Definition: function.cpp:1087
casadi_int n_out() const
Get the number of function outputs.
Definition: function.cpp:823
casadi_int n_in() const
Get the number of function inputs.
Definition: function.cpp:819
std::vector< std::string > get_free() const
Get free variables as a string.
Definition: function.cpp:1184
size_t sz_w() const
Get required length of w field.
Definition: function.cpp:1089
size_t sz_arg() const
Get required length of arg field.
Definition: function.cpp:1083
bool has_free() const
Does the function have free variables.
Definition: function.cpp:1697
std::pair< casadi_int, casadi_int > size_in(casadi_int ind) const
Get input dimension.
Definition: function.cpp:843
Function factory(const std::string &name, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const AuxOut &aux=AuxOut(), const Dict &opts=Dict()) const
Definition: function.cpp:1812
static MX sym(const std::string &name, casadi_int nrow=1, casadi_int ncol=1)
Create an nrow-by-ncol symbolic primitive.
bool is_null() const
Is a null pointer?
bool shared_if_alive(Shared &shared) const
Thread-safe alternative to alive()/shared()
Importer.
Definition: importer.hpp:86
MX - Matrix expression.
Definition: mx.hpp:92
const Sparsity & sparsity() const
Get the sparsity pattern.
Definition: mx.cpp:592
static MX blockcat(const std::vector< std::vector< MX > > &v)
Definition: mx.cpp:1197
MX T() const
Transpose the matrix.
Definition: mx.cpp:1029
static MX solve(const MX &a, const MX &b)
Definition: mx.cpp:2007
static MX vertcat(const std::vector< MX > &x)
Definition: mx.cpp:1099
A symbolic NLP representation.
Definition: nlp_builder.hpp:40
std::vector< MX > x
Variables.
Definition: nlp_builder.hpp:49
std::vector< MX > g
Constraints.
Definition: nlp_builder.hpp:55
MX f
Objective.
Definition: nlp_builder.hpp:52
void serialize_type(SerializingStream &s) const override
Serialize type information.
Definition: nlpsol.cpp:1336
Nlpsol(const std::string &name, const Function &oracle)
Constructor.
Definition: nlpsol.cpp:267
bool iteration_callback_ignore_errors_
Options.
Definition: nlpsol_impl.hpp:95
WeakRef kkt_
Cache for KKT function.
void codegen_body_exit(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1269
bool calc_lam_p_
Options.
Definition: nlpsol_impl.hpp:97
Sparsity get_sparsity_out(casadi_int i) override
Sparsities of function inputs and outputs.
Definition: nlpsol.cpp:312
virtual DM getReducedHessian()
Definition: nlpsol.cpp:669
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1162
Function get_forward(casadi_int nfwd, const std::string &name, const std::vector< std::string > &inames, const std::vector< std::string > &onames, const Dict &opts) const override
Generate a function that calculates forward mode derivatives.
Definition: nlpsol.cpp:869
static const Options options_
Options.
void codegen_body_enter(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1179
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
Definition: nlpsol.cpp:1250
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:420
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int eval(const double **arg, double **res, casadi_int *iw, double *w, void *mem) const final
Evaluate numerically.
Definition: nlpsol.cpp:704
Function kkt() const
Definition: nlpsol.cpp:847
virtual void check_inputs(void *mem) const
Check if the inputs correspond to a well-posed problem.
Definition: nlpsol.cpp:613
bool eval_errors_fatal_
Options.
Definition: nlpsol_impl.hpp:93
int init_mem(void *mem) const override
Initalize memory block.
Definition: nlpsol.cpp:603
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
Definition: nlpsol.cpp:1341
Function detect_simple_bounds_parts_
Definition: nlpsol_impl.hpp:85
bool calc_multipliers_
Options.
Definition: nlpsol_impl.hpp:96
static void bound_consistency(casadi_int n, double *z, double *lam, const double *lbz, const double *ubz)
Definition: nlpsol.cpp:678
std::vector< bool > equality_
Options.
bool warn_initial_bounds_
Options.
Definition: nlpsol_impl.hpp:94
static const std::string infix_
Infix.
Dict sens_linsol_options_
Definition: nlpsol_impl.hpp:82
casadi_nlpsol_prob< double > p_nlp_
Definition: nlpsol_impl.hpp:63
void disp_more(std::ostream &stream) const override
Print description.
Definition: nlpsol.cpp:842
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1306
static std::map< std::string, Plugin > solvers_
Collection of solvers.
bool calc_f_
Options.
Definition: nlpsol_impl.hpp:97
bool calc_g_
Options.
Definition: nlpsol_impl.hpp:97
Function get_reverse(casadi_int nadj, const std::string &name, const std::vector< std::string > &inames, const std::vector< std::string > &onames, const Dict &opts) const override
Generate a function that calculates reverse mode derivatives.
Definition: nlpsol.cpp:1000
std::string class_name() const override
Get type name.
std::vector< char > detect_simple_bounds_is_simple_
Definition: nlpsol_impl.hpp:84
casadi_int np_
Number of parameters.
Definition: nlpsol_impl.hpp:72
double min_lam_
Options.
Definition: nlpsol_impl.hpp:99
Sparsity get_sparsity_in(casadi_int i) override
Sparsities of function inputs and outputs.
Definition: nlpsol.cpp:294
static Function create_oracle(const std::map< std::string, XType > &d, const Dict &opts)
Convert dictionary to Problem.
Definition: nlpsol.cpp:129
bool calc_lam_x_
Options.
Definition: nlpsol_impl.hpp:97
std::vector< casadi_int > detect_simple_bounds_target_g_
Definition: nlpsol_impl.hpp:87
casadi_int callback_step_
Execute the callback function only after this amount of iterations.
Definition: nlpsol_impl.hpp:78
virtual void setOptionsFromFile(const std::string &file)
Read options from parameter xml.
Definition: nlpsol.cpp:674
std::vector< casadi_int > detect_simple_bounds_target_x_
Definition: nlpsol_impl.hpp:86
int callback(NlpsolMemory *m) const
Definition: nlpsol.cpp:1121
~Nlpsol() override=0
Destructor.
Definition: nlpsol.cpp:286
std::vector< bool > discrete_
Options.
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
virtual bool integer_support() const
Can discrete variables be treated.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:795
bool bound_consistency_
Options.
Definition: nlpsol_impl.hpp:98
bool no_nlp_grad_
Options.
std::string sens_linsol_
Linear solver and options.
Definition: nlpsol_impl.hpp:81
virtual int solve(void *mem) const =0
Function fcallback_
callback function, executed at each iteration
Definition: nlpsol_impl.hpp:75
bool is_a(const std::string &type, bool recursive) const override
Check if the function is of a particular type.
Definition: nlpsol.cpp:290
double get_default_in(casadi_int ind) const override
Get default input value.
Base class for functions that perform calculation with an oracle.
Function oracle_
Oracle: Used to generate other functions.
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
void join_results(OracleMemory *m) const
Combine results from different threads.
void init(const Dict &opts) override
int init_mem(void *mem) const override
Initalize memory block.
virtual void codegen_body_enter(CodeGenerator &g) const
Generate code for the function body.
int calc_function(OracleMemory *m, const std::string &fcn, const double *const *arg=nullptr, int thread_id=0) const
std::vector< std::string > get_function() const override
Get list of dependency functions.
static const Options options_
Options.
Dict get_stats(void *mem) const override
Get all statistics.
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
virtual void codegen_body_exit(CodeGenerator &g) const
Generate code for the function body.
static bool has_plugin(const std::string &pname, bool verbose=false)
Check if a plugin is available or can be loaded.
static Nlpsol * instantiate(const std::string &fname, const std::string &pname, Problem problem)
void serialize_type(SerializingStream &s) const
Serialize type information.
static const Options & plugin_options(const std::string &pname)
Get the plugin options.
static Plugin & getPlugin(const std::string &pname)
Load and get the creator function.
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize with type disambiguation.
static Plugin load_plugin(const std::string &pname, bool register_plugin=true, bool needs_lock=true)
Load a plugin dynamically.
Base class for FunctionInternal and LinsolInternal.
bool error_on_fail_
Throw an exception on failure?
void print(const char *fmt,...) const
C-style formatted printing during evaluation.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
GenericShared implements a reference counting framework for efficient and easy memory management.
void disp(std::ostream &stream, bool more=false) const
Print a description of the object.
General sparsity class.
Definition: sparsity.hpp:106
casadi_int numel() const
The total number of elements, including structural zeros, i.e. size2()*size1()
Definition: sparsity.cpp:132
std::string dim(bool with_nz=false) const
Get the dimension as a string.
Definition: sparsity.cpp:587
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern *.
Definition: sparsity.cpp:1012
Sparsity T() const
Transpose the matrix.
Definition: sparsity.cpp:394
const casadi_int * row() const
Get a reference to row-vector,.
Definition: sparsity.cpp:164
bool is_empty(bool both=false) const
Check if the sparsity is empty.
Definition: sparsity.cpp:144
const casadi_int * colind() const
Get a reference to the colindex of all column element (see class description)
Definition: sparsity.cpp:168
std::string doc_nlpsol(const std::string &name)
Get the documentation string for a plugin.
Definition: nlpsol.cpp:42
bool has_nlpsol(const std::string &name)
Check if a particular plugin is available.
Definition: nlpsol.cpp:34
void load_nlpsol(const std::string &name)
Explicitly load a plugin dynamically.
Definition: nlpsol.cpp:38
std::string nlpsol_option_info(const std::string &name, const std::string &op)
Get documentation for a particular option.
Definition: nlpsol.cpp:838
casadi_int nlpsol_n_in()
Number of NLP solver inputs.
Definition: nlpsol.cpp:259
std::string nlpsol_option_type(const std::string &name, const std::string &op)
Get type info for a particular option.
Definition: nlpsol.cpp:834
std::vector< std::string > nlpsol_options(const std::string &name)
Get all options for a plugin.
Definition: nlpsol.cpp:830
std::vector< std::string > nlpsol_in()
Get input scheme of NLP solvers.
Definition: nlpsol.cpp:200
Function nlpsol(const std::string &name, const std::string &solver, const SXDict &nlp, const Dict &opts)
Definition: nlpsol.cpp:118
casadi_int nlpsol_n_out()
Number of NLP solver outputs.
Definition: nlpsol.cpp:263
std::vector< std::string > nlpsol_out()
Get NLP solver output scheme of NLP solvers.
Definition: nlpsol.cpp:206
double nlpsol_default_in(casadi_int ind)
Default input for an NLP solver.
Definition: nlpsol.cpp:212
The casadi namespace.
Definition: archiver.cpp:28
NlpsolInput
Input arguments of an NLP Solver.
Definition: nlpsol.hpp:194
@ NLPSOL_P
Value of fixed parameters (np x 1)
Definition: nlpsol.hpp:198
@ NLPSOL_UBX
Decision variables upper bound (nx x 1), default +inf.
Definition: nlpsol.hpp:202
@ NLPSOL_X0
Decision variables, initial guess (nx x 1)
Definition: nlpsol.hpp:196
@ NLPSOL_LAM_G0
Lagrange multipliers for bounds on G, initial guess (ng x 1)
Definition: nlpsol.hpp:210
@ NLPSOL_UBG
Constraints upper bound (ng x 1), default +inf.
Definition: nlpsol.hpp:206
@ NLPSOL_LAM_X0
Lagrange multipliers for bounds on X, initial guess (nx x 1)
Definition: nlpsol.hpp:208
@ NLPSOL_NUM_IN
Definition: nlpsol.hpp:211
@ NLPSOL_LBG
Constraints lower bound (ng x 1), default -inf.
Definition: nlpsol.hpp:204
@ NLPSOL_LBX
Decision variables lower bound (nx x 1), default -inf.
Definition: nlpsol.hpp:200
std::map< std::string, MX > MXDict
Definition: mx.hpp:1009
NlpsolOutput
Output arguments of an NLP Solver.
Definition: nlpsol.hpp:215
@ NLPSOL_G
Constraints function at the optimal solution (ng x 1)
Definition: nlpsol.hpp:221
@ NLPSOL_X
Decision variables at the optimal solution (nx x 1)
Definition: nlpsol.hpp:217
@ NLPSOL_NUM_OUT
Definition: nlpsol.hpp:228
@ NLPSOL_LAM_P
Lagrange multipliers for bounds on P at the solution (np x 1)
Definition: nlpsol.hpp:227
@ NLPSOL_F
Cost function value at the optimal solution (1 x 1)
Definition: nlpsol.hpp:219
@ NLPSOL_LAM_G
Lagrange multipliers for bounds on G at the solution (ng x 1)
Definition: nlpsol.hpp:225
@ NLPSOL_LAM_X
Lagrange multipliers for bounds on X at the solution (nx x 1)
Definition: nlpsol.hpp:223
T get_from_dict(const std::map< std::string, T > &d, const std::string &key, const T &default_value)
void assign_vector(const std::vector< S > &s, std::vector< D > &d)
@ NL_X
Decision variable.
Definition: nlpsol.hpp:170
@ NL_P
Fixed parameter.
Definition: nlpsol.hpp:172
@ NL_NUM_IN
Number of NLP inputs.
Definition: nlpsol.hpp:174
double if_else(double x, double y, double z)
Definition: calculus.hpp:290
@ NL_F
Objective function.
Definition: nlpsol.hpp:183
@ NL_G
Constraint function.
Definition: nlpsol.hpp:185
@ NL_NUM_OUT
Number of NLP outputs.
Definition: nlpsol.hpp:187
int detect_bounds_callback(const double **arg, double **res, casadi_int *iw, double *w, void *callback_data)
Definition: nlpsol.cpp:576
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <- x.
void casadi_fill(T1 *x, casadi_int n, T1 alpha)
FILL: x <- alpha.
@ OT_BOOLVECTOR
@ OT_INTVECTOR
std::map< std::string, SX > SXDict
Definition: sx_fwd.hpp:40
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
std::vector< bool > boolvec_not(const std::vector< bool > &v)
Invert all entries.
const double inf
infinity
Definition: calculus.hpp:50
const std::vector< std::string > NL_INPUTS
Shortname for input arguments of an NLP function.
Definition: nlpsol.hpp:178
const double nan
Not a number.
Definition: calculus.hpp:53
void casadi_scal(casadi_int n, T1 alpha, T1 *x)
SCAL: x <- alpha*x.
std::vector< T > vector_select(const std::vector< T > &v, const std::vector< bool > &s, bool invert=false)
Select subset of vector.
T * get_ptr(std::vector< T > &v)
Get a pointer to the data contained in the vector.
Function construct_nlpsol(const std::string &name, const std::string &solver, const std::map< std::string, X > &nlp, const Dict &opts)
Definition: nlpsol.cpp:47
Matrix< double > DM
Definition: dm_fwd.hpp:33
Function external(const std::string &name, const Importer &li, const Dict &opts)
Load a just-in-time compiled external function.
Definition: external.cpp:42
@ SOLVER_RET_SUCCESS
@ SOLVER_RET_UNKNOWN
@ SOLVER_RET_EXCEPTION
std::vector< casadi_int > boolvec_to_index(const std::vector< bool > &v)
const std::vector< std::string > NL_OUTPUTS
Shortname for output arguments of an NLP function.
Definition: nlpsol.hpp:191
Integrator memory.
Definition: nlpsol_impl.hpp:40
casadi_nlpsol_data< double > d_nlp
Definition: nlpsol_impl.hpp:42
Options metadata for a class.
Definition: options.hpp:40
std::string type(const std::string &name) const
Definition: options.cpp:289
std::vector< std::string > all() const
Definition: options.cpp:283
std::string info(const std::string &name) const
Definition: options.cpp:295
std::map< std::string, FStats > fstats
void add_stat(const std::string &s)
const T1 * lam_g0
Definition: casadi_nlp.hpp:87
const T1 * lam_x0
Definition: casadi_nlp.hpp:87
casadi_nlpsol_detect_bounds_prob< T1 > detect_bounds
Definition: casadi_nlp.hpp:46