optistack_internal.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 #include "optistack_internal.hpp"
26 #include "nlpsol.hpp"
27 #include "conic.hpp"
28 #include "function_internal.hpp"
29 #include "global_options.hpp"
30 #include "filesystem_impl.hpp"
31 
32 namespace casadi {
33 
34 class InternalOptiCallback : public FunctionInternal {
35  public:
36 
37  InternalOptiCallback(OptiNode& sol) : FunctionInternal(class_name()), sol_(sol) {}
38 
39  ~InternalOptiCallback() override {
40  clear_mem();
41  }
42 
44  std::string class_name() const override {return "InternalOptiCallback";}
45 
46  // Number of inputs and outputs
47  size_t get_n_in() override { return nlpsol_n_out();}
48 
49  Sparsity get_sparsity_in(casadi_int i) override {
50  std::string n = nlpsol_out(i);
51  casadi_int size = 0;
52  if (n=="f") {
53  size = 1;
54  } else if (n=="lam_x" || n=="x") {
55  size = sol_.nx();
56  } else if (n=="lam_g" || n=="g") {
57  size = sol_.ng();
58  } else if (n=="p" || n=="lam_p") {
59  size = sol_.np();
60  if (size==0) return Sparsity::dense(0, 0);
61  } else {
62  return Sparsity::dense(0, 0);
63  }
64  return Sparsity::dense(size, 1);
65  }
66 
67  void reset() { i=0; }
68 
69  // eval_dm has been defined instead of eval
70  bool has_eval_dm() const override { return true;}
71 
73  std::vector<DM> eval_dm(const std::vector<DM>& arg) const override {
74  DMDict r;
75 
76  for (casadi_int i=0;i<nlpsol_n_out();++i) {
77  r[nlpsol_out(i)] = arg[i];
78  }
79 
80  sol_.res(r);
81 
82  if (sol_.user_callback_) sol_.user_callback_->call(i);
83 
84  i+=1;
85  return {0};
86  }
87 
88  bool associated_with(const OptiNode* o) { return &sol_==o; }
89 
90  private:
91  OptiNode& sol_;
92  mutable casadi_int i;
93 };
94 
// Factory: heap-allocates a fresh OptiNode; ownership passes to the caller.
OptiNode* OptiNode::create(const std::string& problem_type) {
  return new OptiNode(problem_type);
}
98 
99 
101  user_callback_ = callback;
102 }
103 
105  user_callback_ = nullptr;
106 }
107 
109  return user_callback_ != 0;
110 }
111 
112 std::string OptiNode::format_stacktrace(const Dict& stacktrace, casadi_int indent) {
113  std::string s_indent;
114  for (casadi_int i=0;i<indent;++i) {
115  s_indent+= " ";
116  }
117  std::string description;
118  std::string filename = stacktrace.at("file").as_string();
119  casadi_int line = stacktrace.at("line").as_int();
120  description += "called from " + filename +":"+str(line);
121  std::string name = stacktrace.at("name").as_string();
122  if (name!="Unknown" && name!= "<module>")
123  description += " in " + stacktrace.at("name").as_string();
124  try {
125  auto file_ptr = Filesystem::ifstream_ptr(filename);
126  std::istream& file = *file_ptr;
127  for (casadi_int i=0; i<line-1; ++i) {
128  file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
129  }
130  std::string contents; std::getline(file, contents);
131  auto it = contents.find_first_not_of(" \t\r\n");
132  if (it!=std::string::npos) {
133  description += "\n" + s_indent + contents.substr(it);
134  }
135  } catch(...) {
136  // pass
137  }
138  return description;
139 }
140 
141 std::string OptiNode::describe(const MX& expr, casadi_int indent, const Dict& opts) const {
142  if (problem_dirty()) return baked_copy().describe(expr, indent, opts);
143  casadi_int max_stacktrace_depth = 1;
144  for (const auto& op : opts) {
145  if (op.first=="max_stacktrace_depth") {
146  max_stacktrace_depth = op.second.as_int();
147  } else {
148  casadi_warning("Unknown option '" + op.first + "'");
149  }
150  }
151  std::string s_indent;
152  for (casadi_int i=0;i<indent;++i) {
153  s_indent+= " ";
154  }
155  std::string description = s_indent;
156  if (expr.is_symbolic()) {
157  if (has(expr)) {
158  description += "Opti " + variable_type_to_string(meta(expr).type) + " '" + expr.name() +
159  "' of shape " + expr.dim();
160  const Dict& extra = meta(expr).extra;
161  auto it = extra.find("stacktrace");
162  if (it!=extra.end()) {
163  for (const Dict& stacktrace : it->second.as_dict_vector()) {
164  description += ", " + format_stacktrace(stacktrace, indent+1);
165  if (--max_stacktrace_depth==0) break;
166  }
167  }
168  } else {
169  VariableType vt;
170  if (parse_opti_name(expr.name(), vt)) {
171  description += "Opti " + variable_type_to_string(vt) + " '" + expr.name() +
172  "' of shape " + expr.dim()+
173  ", belonging to a different instance of Opti.";
174  } else {
175  description += "MX symbol '" + expr.name() + "' of shape " + expr.dim();
176  description += ", declared outside of Opti.";
177  }
178  }
179  } else {
180  if (has_con(expr)) {
181  description = "Opti constraint of shape " + expr.dim();
182  const Dict& extra = meta_con(expr).extra;
183  auto it = extra.find("stacktrace");
184  if (it!=extra.end()) {
185  for (const Dict& stacktrace : it->second.as_dict_vector()) {
186  description += ", " + format_stacktrace(stacktrace, indent+1);
187  if (--max_stacktrace_depth==0) break;
188  }
189  }
190  } else {
191  std::vector<MX> s = symvar(expr);
192  if (s.empty()) {
193  description+= "Constant epxression.";
194  } else {
195  description+= "General expression, dependent on " + str(s.size()) + " symbols:";
196  for (casadi_int i=0;i<s.size();++i) {
197  description+= "\n"+describe(s[i], indent+1);
198  if (i>5) {
199  description+= "\n...";
200  break;
201  }
202  }
203  }
204  }
205  }
206 
207  return description;
208 }
209 
210 std::string OptiNode::g_describe(casadi_int i, const Dict& opts) const {
211  if (problem_dirty()) return baked_copy().g_describe(i, opts);
212  MX expr = g_lookup(i);
213  casadi_int local_i = i-meta_con(expr).start + GlobalOptions::start_index;
214  std::string description = describe(expr, 0, opts);
215  if (expr.numel()>1)
216  description += "\nAt nonzero " + str(local_i % expr.numel()) +
217  ", part " + str((casadi_int) local_i / expr.numel()) + ".";
218  return description;
219 }
220 
221 std::string OptiNode::x_describe(casadi_int i, const Dict& opts) const {
222  if (problem_dirty()) return baked_copy().x_describe(i, opts);
223  MX symbol = x_lookup(i);
224  casadi_int local_i = i-meta(symbol).start + GlobalOptions::start_index;
225  std::string description = describe(symbol, 0, opts);
226  if (symbol.numel()>1)
227  description += "\nAt nonzero " + str(local_i) + ".";
228  return description;
229 }
230 
231 MX OptiNode::x_lookup(casadi_int i) const {
232  if (problem_dirty()) return baked_copy().x_lookup(i);
233  casadi_assert_dev(i>=0);
234  casadi_assert_dev(i<nx());
235  std::vector<MX> x = active_symvar(OPTI_VAR);
236  for (const auto& e : x) {
237  const MetaVar& m = meta(e);
238  if (i>=m.start && i<m.stop) return e;
239  }
240  casadi_error("Internal error");
241  return MX();
242 }
243 
244 MX OptiNode::g_lookup(casadi_int i) const {
245  if (problem_dirty()) return baked_copy().g_lookup(i);
246  casadi_assert_dev(i>=0);
247  casadi_assert_dev(i<ng());
248  for (const auto& e : g_) {
249  const MetaCon& m = meta_con(e);
250  if (i>=m.start && i<m.stop) return e;
251  }
252  casadi_error("Internal error");
253  return MX();
254 }
255 
256 OptiNode::OptiNode(const std::string& problem_type) :
257  count_(0), count_var_(0), count_par_(0), count_dual_(0) {
258  f_ = 0;
259  f_linear_scale_ = 1;
260  instance_number_ = instance_count_++;
261  user_callback_ = nullptr;
262  store_initial_[OPTI_VAR] = {};
263  store_initial_[OPTI_PAR] = {};
264  store_initial_[OPTI_DUAL_G] = {};
265  store_latest_[OPTI_VAR] = {};
266  store_latest_[OPTI_DUAL_G] = {};
267  casadi_assert(problem_type=="nlp" || problem_type=="conic",
268  "Specified problem type '" + problem_type + "'unknown. "
269  "Choose 'nlp' (default) or 'conic'.");
270  problem_type_ = problem_type;
272  helpers_ = std::make_shared<ValueCache>();
273 }
274 
276 }
277 
278 MX OptiNode::variable(casadi_int n, casadi_int m, const std::string& attribute) {
279 
280  // Prepare metadata
281  MetaVar meta_data;
282  meta_data.attribute = attribute;
283  meta_data.n = n;
284  meta_data.m = m;
285  meta_data.type = OPTI_VAR;
286  meta_data.count = count_++;
287  meta_data.i = count_var_++;
288  meta_data.domain = OPTI_DOMAIN_REAL;
289 
290  MX symbol, ret;
291 
292  if (attribute=="symmetric") {
293  casadi_assert(n==m, "You specified attribute 'symmetric', "
294  "while matrix is not even square, but " + str(n) + "-by-" + str(m) + ".");
295  symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n*(n+1)/2);
296  ret = tril2symm(MX(Sparsity::lower(n), symbol));
297  } else if (attribute=="full") {
298  symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n, m);
299  ret = symbol;
300  } else {
301  casadi_error("Unknown attribute '" + attribute + "'. Choose from 'full' or 'symmetric'.");
302  }
303 
304  // Store the symbol; preventing it from going ut of scope
305  symbols_.push_back(symbol);
306  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
307  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
308  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
309  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
310 
311  set_meta(symbol, meta_data);
312  return ret;
313 }
314 
315 MX OptiNode::variable(const MX& symbol, const std::string& attribute) {
316 
317  // Prepare metadata
318  MetaVar meta_data;
319  meta_data.attribute = attribute;
320  meta_data.n = symbol.size1();
321  meta_data.m = symbol.size2();
322  meta_data.type = OPTI_VAR;
323  meta_data.count = count_++;
324  meta_data.i = count_var_++;
325 
326  // Store the symbol; preventing it from going ut of scope
327  symbols_.push_back(symbol);
328  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
329  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
330  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
331  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
332 
333  set_meta(symbol, meta_data);
334  return symbol;
335 }
336 
337 MX OptiNode::variable(const Sparsity& sp, const std::string& attribute) {
338 
339  // Prepare metadata
340  MetaVar meta_data;
341  meta_data.attribute = attribute;
342  meta_data.n = sp.size1();
343  meta_data.m = sp.size2();
344  meta_data.type = OPTI_VAR;
345  meta_data.count = count_++;
346  meta_data.i = count_var_++;
347 
348  MX symbol = MX::sym(name_prefix() + "x_" + str(count_var_), sp);
349 
350  // Store the symbol; preventing it from going ut of scope
351  symbols_.push_back(symbol);
352  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
353  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
354  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
355  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
356 
357  set_meta(symbol, meta_data);
358  return symbol;
359 }
360 
// Prefix that makes symbol names unique across Opti instances, e.g. "opti3_"
std::string OptiNode::name_prefix() const {
  return "opti" + str(instance_number_) + "_";
}
364 
366  return Opti::create(new OptiNode(*this));
367 }
368 
369 void OptiNode::register_dual(MetaCon& c) {
370 
371  // Prepare metadata
372  MetaVar meta_data;
373  meta_data.attribute = "full";
374  meta_data.n = c.canon.size1();
375  meta_data.m = c.canon.size2();;
376  meta_data.type = OPTI_DUAL_G;
377  meta_data.count = count_++;
378  meta_data.i = count_dual_++;
379 
380  MX symbol, ret;
381  if (c.type==OPTI_PSD) {
382  symbol = MX();
383  ret = MX();
384  } else {
385  symbol = MX::sym(name_prefix()+"lam_g_"+str(count_dual_), c.canon.sparsity());
386 
387  casadi_assert_dev(c.canon.is_dense());
388 
389  Sparsity ret_sp = repmat(c.original.sparsity(), 1, c.n);
390 
391  casadi_int N = c.canon.sparsity().nnz();
392 
393  MX flat = vec(symbol);
394  if (c.type==OPTI_DOUBLE_INEQUALITY) {
395  MX v = MX::sym("v");
396  MX decide_left_right = vertcat(if_else_zero(v<0, -v), if_else_zero(v>=0, v));
397  Function sign = Function("sign", {v}, {decide_left_right});
398  Function sign_map = sign.map(c.canon.sparsity().nnz());
399  ret = MX(ret_sp, sign_map((c.flipped ? -1 : 1)*flat)[0].T());
400  } else {
401  casadi_int block_size = N / c.n;
402  std::vector<MX> original_blocks = vertsplit(fabs(flat), block_size);
403  std::vector<MX> blocks(N);
404  for (casadi_int i=0;i<c.n;++i) {
405  casadi_int p = c.flipped? c.n-i-1: i;
406  blocks[p] = original_blocks[i];
407  }
408  ret = MX(ret_sp, vertcat(blocks));
409  }
410  }
411 
412  symbols_.push_back(symbol);
413  store_initial_[OPTI_DUAL_G].push_back(DM::zeros(symbol.sparsity()));
414  store_latest_[OPTI_DUAL_G].push_back(DM::nan(symbol.sparsity()));
415 
416  c.dual = ret;
417  c.dual_canon = symbol;
418 
419  set_meta(symbol, meta_data);
420 }
421 
422 MX OptiNode::parameter(const MX& symbol, const std::string& attribute) {
423  casadi_assert_dev(attribute=="full");
424 
425  // Prepare metadata
426  MetaVar meta_data;
427  meta_data.attribute = attribute;
428  meta_data.n = symbol.size1();
429  meta_data.m = symbol.size2();
430  meta_data.type = OPTI_PAR;
431  meta_data.count = count_++;
432  meta_data.i = count_par_++;
433 
434  symbols_.push_back(symbol);
435  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));
436 
437  set_meta(symbol, meta_data);
438  return symbol;
439 }
440 
441 MX OptiNode::parameter(const Sparsity& sp, const std::string& attribute) {
442  casadi_assert_dev(attribute=="full");
443 
444  // Prepare metadata
445  MetaVar meta_data;
446  meta_data.attribute = attribute;
447  meta_data.n = sp.size1();
448  meta_data.m = sp.size2();
449  meta_data.type = OPTI_PAR;
450  meta_data.count = count_++;
451  meta_data.i = count_par_++;
452 
453  MX symbol = MX::sym(name_prefix() + "p_" + str(count_par_), sp);
454  symbols_.push_back(symbol);
455  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));
456 
457  set_meta(symbol, meta_data);
458  return symbol;
459 }
460 
461 MX OptiNode::parameter(casadi_int n, casadi_int m, const std::string& attribute) {
462  casadi_assert_dev(attribute=="full");
463 
464  // Prepare metadata
465  MetaVar meta_data;
466  meta_data.attribute = attribute;
467  meta_data.n = n;
468  meta_data.m = m;
469  meta_data.type = OPTI_PAR;
470  meta_data.count = count_++;
471  meta_data.i = count_par_++;
472 
473  MX symbol = MX::sym(name_prefix() + "p_" + str(count_par_), n, m);
474  symbols_.push_back(symbol);
475  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));
476 
477  set_meta(symbol, meta_data);
478  return symbol;
479 }
480 
  // Lazily fetch solver statistics and derive the constraint-reduction index
  // maps from the "detect_simple_bounds_*" entries (requires a solved problem).
  assert_solved();
  if (stats_.empty()) {
    stats_ = solver_.stats();
    // Which constraints did the solver absorb as simple variable bounds?
    is_simple_ = get_from_dict(stats_,
      "detect_simple_bounds_is_simple",
      std::vector<bool>(ng(), false));
    target_x_ = get_from_dict(stats_,
      "detect_simple_bounds_target_x",
      std::vector<casadi_int>{});
    casadi_assert_dev(is_simple_.size()==ng());

    g_index_reduce_g_.resize(ng());
    g_index_reduce_x_.resize(ng(), -1);
    g_index_unreduce_g_.resize(ng(), -1);

    // Walks target_x_ in step with the simple-bound entries below
    casadi_int* target_x_ptr = target_x_.data();

    casadi_int k=0;
    for (casadi_int i=0;i<is_simple_.size();++i) {
      if (!is_simple_[i]) {
        // Regular constraint: keeps slot k in the reduced constraint set
        g_index_reduce_g_[i] = k;
        g_index_unreduce_g_[k] = i;
        k++;
      } else {
        // Simple bound: maps to a decision variable instead of a constraint
        g_index_reduce_g_[i] = -1;
        g_index_reduce_x_[i] = *target_x_ptr;
        target_x_ptr++;
      }
    }

    reduced_ = any(is_simple_);
  }
  return stats_;
}
516 
// Index of constraint i in the reduced constraint set, or -1 if it was
// absorbed as a simple variable bound.
casadi_int OptiNode::g_index_reduce_g(casadi_int i) const {
  stats();  // ensures the index maps have been computed
  return g_index_reduce_g_[i];
}
// Index in the full constraint set of reduced-set constraint i.
casadi_int OptiNode::g_index_unreduce_g(casadi_int i) const {
  stats();  // ensures the index maps have been computed
  return g_index_unreduce_g_[i];
}
// Decision-variable index targeted by constraint i when it is a simple bound
// (-1 otherwise).
casadi_int OptiNode::g_index_reduce_x(casadi_int i) const {
  stats();  // ensures the index maps have been computed
  return g_index_reduce_x_[i];
}
529 
530 std::string OptiNode::return_status() const {
531  Dict mystats;
532  try {
533  mystats = stats();
534  } catch (...) {
535  //
536  }
537  if (mystats.find("return_status")!=mystats.end()) {
538  std::stringstream ss;
539  ss << mystats.at("return_status");
540  return ss.str();
541  }
542  return "unknown";
543 }
544 
545 bool OptiNode::return_success(bool accept_limit) const {
546  Dict mystats;
547  try {
548  mystats = stats();
549  } catch (...) {
550  //
551  }
552  bool success = false;
553  if (mystats.find("success")!=mystats.end()) success = mystats.at("success");
554  if (!accept_limit) return success;
555 
556  bool limited = false;
557  if (mystats.find("unified_return_status")!=mystats.end())
558  limited = mystats.at("unified_return_status")=="SOLVER_RET_LIMITED";
559  return success || limited;
560 }
561 
563  return solver_;
564 }
565 
// Attach (or overwrite) variable metadata for symbol m, keyed by its node.
void OptiNode::set_meta(const MX& m, const MetaVar& meta) {
  meta_[m.get()] = meta;
}
569 
// Attach (or overwrite) constraint metadata for expression m, keyed by its node.
void OptiNode::set_meta_con(const MX& m, const MetaCon& meta) {
  meta_con_[m.get()] = meta;
}
573 
574 void OptiNode::update_user_dict(const MX& m, const Dict& meta) {
575  if (has_con(m)) {
576  MetaCon m_update = get_meta_con(m);
577  MetaVar m_update2 = get_meta(m_update.dual_canon);
578  for (const auto & it : meta) {
579  m_update.extra[it.first] = it.second;
580  m_update2.extra[it.first] = it.second;
581  }
582  set_meta_con(m, m_update);
583  set_meta(m_update.dual_canon, m_update2);
584  } else {
585  for (const auto & s : MX::symvar(m)) {
586  MetaVar m_update = get_meta(s);
587  for (const auto & it : meta)
588  m_update.extra[it.first] = it.second;
589  set_meta(s, m_update);
590  }
591  }
592 }
593 
594 Dict OptiNode::user_dict(const MX& m) const {
595  if (has_con(m)) {
596  MetaCon meta = get_meta_con(m);
597  return meta.extra;
598  } else {
599  MetaVar meta = get_meta(m);
600  return meta.extra;
601  }
602 }
603 
// Dual variable (Lagrange multiplier expression) associated with constraint m.
MX OptiNode::dual(const MX& m) const {
  return meta_con(m).dual;
}
607 
// Metadata of symbol m (const); m must be registered with this instance.
const MetaVar& OptiNode::meta(const MX& m) const {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;  // guaranteed to exist after assert_has
}
613 
// Metadata of constraint m (const); m must be a registered constraint.
const MetaCon& OptiNode::meta_con(const MX& m) const {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;  // guaranteed to exist after assert_has_con
}
619 
// Mutable metadata of symbol m; m must be registered with this instance.
MetaVar& OptiNode::meta(const MX& m) {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;  // guaranteed to exist after assert_has
}
625 
// Mutable metadata of constraint m; m must be a registered constraint.
MetaCon& OptiNode::meta_con(const MX& m) {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;  // guaranteed to exist after assert_has_con
}
631 
// By-value copy of the metadata of symbol m.
MetaVar OptiNode::get_meta(const MX& m) const {
  return meta(m);
}
635 
637  return meta_con(m);
638 }
639 
// True if symbol m was registered (as variable/parameter/dual) on this instance.
bool OptiNode::has(const MX& m) const {
  return meta_.find(m.get())!=meta_.end();
}
643 
// True if m is a constraint registered on this instance.
bool OptiNode::has_con(const MX& m) const {
  return meta_con_.find(m.get())!=meta_con_.end();
}
647 
// Verify that symbol m belongs to this Opti instance; raise a descriptive
// error otherwise, distinguishing symbols from other Opti instances from
// raw MX symbols declared outside of Opti.
void OptiNode::assert_has(const MX& m) const {
  if (!has(m)) {
    VariableType vt;
    casadi_assert(m.is_symbolic(), "Symbol expected, got expression.");
    if (parse_opti_name(m.name(), vt)) {
      // Named like an Opti symbol, but registered with another instance
      casadi_error("Unknown: " + describe(m));
    } else {
      casadi_error("Unknown: " + describe(m) + "\n"
        "Note: you cannot use a raw MX.sym in your Opti problem,"
        " only if you package it in a CasADi Function.");
    }
  }
}
661 
// Verify that m is a registered constraint of this instance.
void OptiNode::assert_has_con(const MX& m) const {
  casadi_assert(has_con(m), "Constraint not present in Opti stack.");
}
665 
// Global counter giving each Opti instance a unique number (and name prefix)
casadi_int OptiNode::instance_count_ = 0;
667 
668 bool OptiNode::parse_opti_name(const std::string& name, VariableType& vt) const {
669  casadi_int i = name.find("opti");
670  if (i!=0) return false;
671 
672  i = name.find("_");
673  i++;
674  if (i==std::string::npos) return false;
675  if (name.substr(i, 1)=="x") {
676  vt = OPTI_VAR;
677  return true;
678  } else if (name.substr(i, 1)=="p") {
679  vt = OPTI_PAR;
680  return true;
681  } else if (name.substr(i, 5)=="lam_g") {
682  vt = OPTI_DUAL_G;
683  return true;
684  }
685 
686  return false;
687 }
688 
689 std::string OptiNode::variable_type_to_string(VariableType vt) const {
690  auto it = VariableType2String_.find(vt);
691  if (it==VariableType2String_.end()) return "unknown variable type";
692  return it->second;
693 
694 }
// Mapping from variable kind to the label used by variable_type_to_string()
std::map<VariableType, std::string> OptiNode::VariableType2String_ =
  {{OPTI_VAR, "decision variable"},
   {OPTI_PAR, "parameter"},
   {OPTI_DUAL_G, "dual variable"}};
699 
700 std::vector<MX> OptiNode::initial() const {
701  std::vector<MX> ret;
702  for (const auto& e : symvar()) {
703  if (meta(e).type==OPTI_VAR || meta(e).type==OPTI_DUAL_G)
704  ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
705  }
706  return ret;
707 }
708 
709 std::vector<MX> OptiNode::value_variables() const {
710  std::vector<MX> ret;
711  for (const auto& e : symvar()) {
712  if (meta(e).type==OPTI_VAR)
713  ret.push_back(e==store_latest_.at(meta(e).type)[meta(e).i]);
714  }
715  return ret;
716 }
717 
718 std::vector<MX> OptiNode::value_parameters() const {
719  std::vector<MX> ret;
720  for (const auto& e : symvar()) {
721  if (meta(e).type==OPTI_PAR)
722  ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
723  }
724  return ret;
725 }
726 
728  casadi_assert(!f_.is_empty() || !g_.empty(),
729  "You need to specify at least an objective (y calling 'minimize'), "
730  "or a constraint (by calling 'subject_to').");
731 
732  symbol_active_.clear();
733  symbol_active_.resize(symbols_.size());
734  helpers_ = std::make_shared<ValueCache>();
735 
736  // Gather all expressions
737  MX total_expr = vertcat(f_, veccat(g_));
738 
739  // Categorize the symbols appearing in those expressions
740  for (const auto& d : symvar(total_expr))
741  symbol_active_[meta(d).count] = true;
742 
743  std::vector<MX> x = active_symvar(OPTI_VAR);
744  for (casadi_int i=0;i<x.size();++i) meta(x[i]).active_i = i;
745 
746  casadi_int offset = 0;
747  for (const auto& v : x) {
748  meta(v).start = offset;
749  offset+= v.nnz();
750  meta(v).stop = offset;
751  }
752  std::vector<MX> p = active_symvar(OPTI_PAR);
753  for (casadi_int i=0;i<p.size();++i) meta(p[i]).active_i = i;
754 
755  // Fill the nlp definition
756  nlp_["x"] = veccat(x);
757  nlp_["p"] = veccat(p);
758 
759  nlp_unscaled_["x"] = veccat(x);
760  nlp_unscaled_["p"] = veccat(p);
761 
762  discrete_.clear();
763  for (const MX& e : x) {
764  discrete_.insert(discrete_.end(), e.nnz(), meta(e).domain == OPTI_DOMAIN_INTEGER);
765  }
766 
767  nlp_["f"] = f_;
768  nlp_unscaled_["f"] = f_;
769 
770  offset = 0;
771  for (casadi_int i=0;i<g_.size();++i) {
772  MetaCon& r = meta_con(g_[i]);
773  MetaVar& r2 = meta(r.dual_canon);
774  symbol_active_[r2.count] = true;
775 
776  // Compute offsets for this constraint:
777  // location into the global constraint variable
778  r.start = offset;
779  offset+= r.canon.nnz();
780  r.stop = offset;
781 
782  r2.start = r.start;
783  r2.stop = r.stop;
784 
785  }
786  index_all_to_g_.resize(offset);
787  offset = 0;
788  casadi_int offset_g = 0;
789  for (const auto& g : g_) {
790  const MetaCon& r = meta_con(g);
791  if (r.type==OPTI_PSD) {
792  for (casadi_int i=0;i<r.stop-r.start;++i) {
793  index_all_to_g_[r.start+i] = -1;
794  }
795  } else {
796  for (casadi_int i=0;i<r.stop-r.start;++i) {
797  index_all_to_g_[r.start+i] = offset_g+i;
798  }
799  offset_g += r.canon.nnz();
800  }
801  }
802 
803  std::vector<MX> lam = active_symvar(OPTI_DUAL_G);
804  for (casadi_int i=0;i<lam.size();++i) meta(lam[i]).active_i = i;
805 
806  lam_ = veccat(lam);
807 
808  // Collect bounds and canonical form of constraints
809  std::vector<MX> g_all, g_unscaled_all;
810  std::vector<MX> h_all, h_unscaled_all;
811  std::vector<MX> lbg_all, lbg_unscaled_all;
812  std::vector<MX> ubg_all, ubg_unscaled_all;
813 
814  g_linear_scale_.clear();
815  h_linear_scale_.clear();
816  std::vector<DM> g_linear_scale, h_linear_scale;
817 
818  equality_.clear();
819  for (const auto& g : g_) {
820  if (meta_con(g).type==OPTI_PSD) {
821  h_all.push_back(meta_con(g).canon/meta_con(g).linear_scale);
822  if (meta_con(g).canon.numel()==meta_con(g).linear_scale.numel()) {
823  h_linear_scale.push_back(meta_con(g).linear_scale);
824  } else {
825  casadi_assert_dev(meta_con(g).linear_scale.numel()==1);
826  h_linear_scale.push_back(DM::ones(meta_con(g).canon.sparsity())*meta_con(g).linear_scale);
827  }
828  h_unscaled_all.push_back(meta_con(g).canon);
829  } else {
830  g_all.push_back(meta_con(g).canon/meta_con(g).linear_scale);
831  if (meta_con(g).canon.numel()==meta_con(g).linear_scale.numel()) {
832  g_linear_scale.push_back(meta_con(g).linear_scale);
833  } else {
834  casadi_assert_dev(meta_con(g).linear_scale.numel()==1);
835  g_linear_scale.push_back(DM::ones(meta_con(g).canon.sparsity())*meta_con(g).linear_scale);
836  }
837  g_unscaled_all.push_back(meta_con(g).canon);
838  lbg_all.push_back(meta_con(g).lb/meta_con(g).linear_scale);
839  lbg_unscaled_all.push_back(meta_con(g).lb);
840  ubg_all.push_back(meta_con(g).ub/meta_con(g).linear_scale);
841  ubg_unscaled_all.push_back(meta_con(g).ub);
842 
843  equality_.insert(equality_.end(),
844  meta_con(g).canon.numel(),
845  meta_con(g).type==OPTI_EQUALITY || meta_con(g).type==OPTI_GENERIC_EQUALITY);
846  }
847  }
848 
849  nlp_["g"] = veccat(g_all);
850  g_linear_scale_ = veccat(g_linear_scale).nonzeros();
851  nlp_unscaled_["g"] = veccat(g_unscaled_all);
852  if (problem_type_=="conic") {
853  nlp_["h"] = diagcat(h_all);
854  nlp_unscaled_["h"] = diagcat(h_unscaled_all);
855  h_linear_scale_ = veccat(h_linear_scale).nonzeros();
856  }
857 
858  // Get scaling data
859  std::vector<DM> linear_scale = active_values(OPTI_VAR, store_linear_scale_);
860  std::vector<DM> linear_scale_offset = active_values(OPTI_VAR, store_linear_scale_offset_);
861 
862  linear_scale_ = veccat(linear_scale).nonzeros();
863  linear_scale_offset_ = veccat(linear_scale_offset).nonzeros();
864 
865  // Unscaled version of x
866  std::vector<MX> x_unscaled(x.size());
867  for (casadi_int i=0;i<x.size();++i) {
868  x_unscaled[i] = x[i]*linear_scale[i] + linear_scale_offset[i];
869  }
870 
871  // Perform substitution
872  std::vector<MX> expr = {nlp_["f"], nlp_["g"]};
873  if (problem_type_=="conic") expr.push_back(nlp_["h"]);
874  std::vector<MX> fgh = substitute(expr, x, x_unscaled);
875  nlp_["f"] = fgh[0];
876  nlp_["g"] = fgh[1];
877  if (problem_type_=="conic") {
878  nlp_["h"] = fgh[2];
879  }
880 
881  // Scale of objective
882  nlp_["f"] = nlp_["f"]/f_linear_scale_;
883 
884  // Create bounds helper function
885  MXDict bounds;
886  bounds["p"] = nlp_["p"];
887  bounds_lbg_ = veccat(lbg_all);
888  bounds_ubg_ = veccat(ubg_all);
889  bounds_unscaled_lbg_ = veccat(lbg_unscaled_all);
890  bounds_unscaled_ubg_ = veccat(ubg_unscaled_all);
891 
892  bounds["lbg"] = bounds_lbg_;
893  bounds["ubg"] = bounds_ubg_;
894 
895  bounds_ = Function("bounds", bounds, {"p"}, {"lbg", "ubg"});
896  mark_problem_dirty(false);
897 }
898 
  // Wrap a derivative helper function h so that it operates on the scaled
  // problem: scaled inputs are unscaled on entry, outputs are rescaled on exit.
  if (problem_dirty()) return baked_copy().scale_helper(h);
  std::string name = h.name();
  MX g_linear_scale_mx = g_linear_scale();
  MX g_linear_scale_inv = 1/g_linear_scale();
  MX f_linear_scale_mx = f_linear_scale();
  MX x_linear_scale_mx = x_linear_scale();
  MX x_linear_scale_offset_mx = x_linear_scale_offset();
  if (name=="nlp_jac_g") {
    MXDict arg;
    arg["x"] = MX::sym("x", nx());
    arg["p"] = MX::sym("p", np());
    MXDict args;
    // Map scaled decision variables back to the original (unscaled) ones
    args["x"] = arg["x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
    args["p"] = arg["p"];
    MXDict res = h(args);
    for (const auto & it : res) {
      if (it.first=="g") {
        arg[it.first] = it.second*g_linear_scale_inv;
      } else if (it.first=="jac_g_x") {
        // Chain rule: rows scaled by 1/g_scale, columns by x_scale
        arg[it.first] = mtimes(
          mtimes(diag(g_linear_scale_inv), it.second),
          diag(x_linear_scale_mx));
      } else {
        casadi_error("Unknown output '" + it.first + "'. Expecting g, jac_g_x.");
      }
    }
    Function ret(name, arg, h.name_in(), h.name_out());
    return ret;
  } else if (name=="nlp_hess_l") {
    MXDict arg;
    arg["x"] = MX::sym("x", nx());
    arg["p"] = MX::sym("p", np());
    arg["lam_f"] = MX::sym("lam_f");
    arg["lam_g"] = MX::sym("lam_g", ng());
    MXDict args;
    // Unscale primal and multiplier inputs before calling the original helper
    args["x"] = arg["x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
    args["p"] = arg["p"];
    args["lam_f"] = arg["lam_f"]/f_linear_scale_mx;
    args["lam_g"] = arg["lam_g"]/g_linear_scale_mx;
    MXDict res = h(args);
    for (const auto & it : res) {
      if (it.first=="triu_hess_gamma_x_x" ||
          it.first=="hess_gamma_x_x" ||
          (it.second.size1()==nx() && it.second.is_square())) {
        // Hessian scales with x_scale on both sides
        MX D = diag(x_linear_scale_mx);
        arg[it.first] = mtimes(mtimes(D, it.second), D);
      } else {
        casadi_error("Unknown output '" + it.first + "'. Expecting triu_hess_gamma_x_x");
      }
    }
    Function ret(name, arg, h.name_in(), h.name_out());
    return ret;
  } else {
    casadi_error("Unknown helper function '" + name + "'");
  }
}
956 
// Select the solver plugin and store its options; construction of the actual
// solver object is deferred until solve time.
void OptiNode::solver(const std::string& solver_name, const Dict& plugin_options,
    const Dict& solver_options) {
  solver_name_ = solver_name;
  solver_options_ = plugin_options;
  // Solver-specific options are nested under the plugin name
  if (!solver_options.empty())
    solver_options_[solver_name] = solver_options;
}
965 
966 std::vector<MX> OptiNode::sort(const std::vector<MX>& v) const {
967  // We exploit the fact that std::map is ordered
968 
969  // Populate map
970  std::map<casadi_int, MX> unordered;
971  for (const auto& d : v)
972  unordered[meta(d).count] = d;
973 
974  // Loop over map (ordered)
975  std::vector<MX> ret;
976  for (auto const &e : unordered)
977  ret.push_back(e.second);
978  return ret;
979 }
980 
// All symbols ever registered on this instance, in creation order.
std::vector<MX> OptiNode::symvar() const {
  return symbols_;
}
984 
// Symbols appearing in expr, sorted by creation order.
std::vector<MX> OptiNode::symvar(const MX& expr) const {
  return sort(MX::symvar(expr));
}
988 
// Decompose a (possibly chained) inequality a, e.g. lb <= g <= ub, into its
// ordered operands. `flipped` is set when the nesting was on the right leaf
// (the chain was written in the opposite order).
std::vector<MX> OptiNode::ineq_unchain(const MX& a, bool& flipped) {
  flipped = false;
  casadi_assert_dev(a.is_op(OP_LE) || a.is_op(OP_LT));

  // Is there inequalities in the left or right leaf?
  bool left = a.dep(0).is_op(OP_LE) || a.dep(0).is_op(OP_LT);
  bool right = a.dep(1).is_op(OP_LE) || a.dep(1).is_op(OP_LT);
  casadi_assert_dev(!left || !right);

  if (!left && !right)
    return {a.dep(0), a.dep(1)}; // Simple inequality

  // We have a chain of inequalities
  // `ineq` selects the leaf holding the rest of the chain (0=left, 1=right)
  bool ineq = !left;
  std::vector<MX> ret = {a.dep(!ineq)};
  MX e = a.dep(ineq);
  // Walk the nested inequality, collecting the non-chain operand at each step
  while (e.is_op(OP_LE) || e.is_op(OP_LT)) {
    casadi_assert_dev(!e.is_op(OP_EQ));
    casadi_assert_dev(!e.dep(!ineq).is_op(OP_LE) && !e.dep(!ineq).is_op(OP_LT));
    ret.push_back(e.dep(!ineq));
    e = e.dep(ineq);
  }
  ret.push_back(e);
  // Restore left-to-right order when the chain was nested on the left
  if (left) std::reverse(ret.begin(), ret.end());
  flipped = !left;

  return ret;
}
1017 
// Verify that every symbol in e belongs to this Opti instance.
void OptiNode::assert_only_opti_symbols(const MX& e) const {
  std::vector<MX> symbols = MX::symvar(e);
  for (const auto& s : symbols) assert_has(s);
}
1022 
// Verify that every symbol in e belongs to this instance and is not a dual
// variable (duals are forbidden in e.g. objectives and constraints).
void OptiNode::assert_only_opti_nondual(const MX& e) const {
  std::vector<MX> symbols = MX::symvar(e);
  for (const auto& s : symbols) {
    assert_has(s);
    casadi_assert(meta(s).type!=OPTI_DUAL_G, "Dual variables forbidden in this context.");
  }
}
1030 
bool OptiNode::is_parametric(const MX& expr) const {
  // True iff expr contains no decision variables (it may still contain parameters)
  return symvar(expr, OPTI_VAR).empty();
}
1034 
1035 MetaCon OptiNode::canon_expr(const MX& expr, const DM& linear_scale) const {
1036  MX c = expr;
1037 
1038  MetaCon con;
1039  con.original = expr;
1040  casadi_assert(linear_scale.is_scalar() || linear_scale.size()==expr.size(),
1041  "Linear scale must have the same size as the expression. "
1042  "You got linear_scale " + con.linear_scale.dim() + " while " + expr.dim() + " is expected.");
1043  con.linear_scale = linear_scale;
1044 
1045  if (c.is_op(OP_LE) || c.is_op(OP_LT)) { // Inequalities
1046  std::vector<MX> ret;
1047  bool flipped;
1048  std::vector<MX> args = ineq_unchain(c, flipped);
1049  std::vector<bool> parametric;
1050  for (auto &a : args) parametric.push_back(is_parametric(a));
1051 
1052  if (args.size()==2 && (parametric[0] || parametric[1])) {
1053  // case: g(x,p) <= bound(p)
1054  MX e = args[0]-args[1];
1055  if (e.is_vector()) {
1056  casadi_assert(!parametric[0] || !parametric[1],
1057  "Constraint must contain decision variables.");
1058  if (problem_type_=="conic") {
1059  if (args[0].op()==OP_NORMF || args[0].op()==OP_NORM2) {
1060  args[0] = -soc(args[0].dep(), args[1]);
1061  args[1] = 0;
1062  }
1063  } else {
1064  con.type = OPTI_INEQUALITY;
1065  if (parametric[0]) {
1066  con.lb = args[0]*DM::ones(e.sparsity());
1067  con.ub = inf*DM::ones(e.sparsity());
1068  con.canon = args[1]*DM::ones(e.sparsity());
1069  } else {
1070  con.lb = -inf*DM::ones(e.sparsity());
1071  con.ub = args[1]*DM::ones(e.sparsity());
1072  con.canon = args[0]*DM::ones(e.sparsity());
1073  }
1074  return con;
1075  }
1076  }
1077  // Fall through to generic inequalities
1078  } else if (args.size()==3 && parametric[0] && parametric[2]) {
1079  // lb(p) <= g(x,p) <= ub(p)
1081  con.lb = args[0]*DM::ones(args[1].sparsity());
1082  con.ub = args[2]*DM::ones(args[1].sparsity());
1083  con.canon = args[1]*DM::ones(args[1].sparsity());
1084  con.flipped = flipped;
1085  con.n = 2;
1086  return con;
1087  }
1088 
1089  bool type_known = false;
1090  for (casadi_int j=0;j<args.size()-1;++j) {
1091  MX e = args[j]-args[j+1];
1092  if (problem_type_=="conic") {
1093  if (args[j].op()==OP_NORMF || args[j].op()==OP_NORM2) {
1094  args[j] = -soc(args[j].dep(), args[j+1]);
1095  args[j+1] = 0;
1096  e = args[j]-args[j+1];
1097  }
1098  }
1099  if (e.is_vector()) {
1100  // g1(x,p) <= g2(x,p)
1101  ret.push_back(e);
1102  casadi_assert_dev(!type_known || con.type==OPTI_GENERIC_INEQUALITY);
1103  type_known = true;
1105  con.flipped = flipped;
1106  } else {
1107  // A(x,p) >= b(p)
1108  MX a = args[j+1];
1109  MX b = args[j];
1110  e = a-b;
1111 
1112  casadi_assert(e.size1()==e.size2(),
1113  "Matrix inequalities must be square. Did you mean element-wise inequality instead?");
1114  if (a.is_scalar()) a*= MX::eye(e.size1());
1115  if (b.is_scalar()) b*= MX::eye(e.size1());
1116  e = a-b;
1117 
1118  ret.push_back(e);
1119  casadi_assert_dev(!type_known || con.type==OPTI_PSD);
1120  type_known = true;
1121  con.type = OPTI_PSD;
1122  }
1123  }
1124 
1125  if (con.type==OPTI_GENERIC_INEQUALITY) {
1126  con.canon = veccat(ret);
1127  con.lb = -inf*DM::ones(con.canon.sparsity());
1128  con.ub = DM::zeros(con.canon.sparsity());
1129  con.n = ret.size();
1130  if (!con.linear_scale.is_scalar()) {
1131  con.linear_scale = repmat(con.linear_scale, ret.size(), 1);
1132  }
1133  } else {
1134  con.canon = diagcat(ret);
1135  con.n = ret.size();
1136  }
1137  return con;
1138  } else if (c.is_op(OP_EQ)) { // Inequalities
1139  casadi_assert(!is_parametric(c.dep(0)) || !is_parametric(c.dep(1)),
1140  "Constraint must contain decision variables.");
1141  MX e = c.dep(0)-c.dep(1);
1142  if (is_parametric(c.dep(0))) {
1143  con.canon = c.dep(1)*DM::ones(e.sparsity());
1144  con.lb = c.dep(0)*DM::ones(e.sparsity());
1145  con.type = OPTI_EQUALITY;
1146  casadi_assert(c.dep(0).size1()<=c.dep(1).size1() && c.dep(0).size2()<=c.dep(1).size2(),
1147  "Constraint shape mismatch.");
1148  } else if (is_parametric(c.dep(1))) {
1149  con.canon = c.dep(0)*DM::ones(e.sparsity());
1150  con.lb = c.dep(1)*DM::ones(e.sparsity());
1151  con.type = OPTI_EQUALITY;
1152  casadi_assert(c.dep(1).size1()<=c.dep(0).size1() && c.dep(1).size2()<=c.dep(0).size2(),
1153  "Constraint shape mismatch.");
1154  } else {
1155  con.lb = DM::zeros(e.sparsity());
1156  con.canon = e;
1158  }
1159  con.ub = con.lb;
1160  return con;
1161  } else { // Something else
1162  con.type = OPTI_UNKNOWN;
1163  con.canon = c;
1164  return con;
1165  }
1166 
1167 }
1168 
1170  casadi_assert(solved(),
1171  "This action is forbidden since you have not solved the Opti stack yet "
1172  "(with calling 'solve').");
1173 }
1174 
1176  casadi_assert(!problem_dirty(),
1177  "This action is forbidden since you have not baked the Opti stack yet "
1178  "(with calling 'solve').");
1179 }
1180 
1182  casadi_assert_dev(g_.empty());
1183  casadi_assert_dev(f_.is_empty());
1184 }
1185 
void OptiNode::minimize(const MX& f, double linear_scale) {
  // Set the objective to minimize.
  // f must be a scalar expression in Opti variables/parameters; duals are rejected.
  assert_only_opti_nondual(f);
  casadi_assert(f.is_scalar(), "Objective must be scalar, got " + f.dim() + ".");
  f_ = f;
  // Scale factor stored alongside the objective
  f_linear_scale_ = linear_scale;
}
1193 
void OptiNode::subject_to(const MX& g, const DM& linear_scale, const Dict& options) {
  // Add a constraint expression g to the problem, with optional per-element
  // linear scaling and extra options.
  assert_only_opti_nondual(g);
  g_.push_back(g);

  // Reject degenerate constraint expressions up front
  casadi_assert(!g.is_empty(), "You passed an empty expression to `subject_to`. "
    "Make sure the number of rows and columns is non-zero. "
    "Got " + g.dim(true) + ".");
  casadi_assert(g.nnz()>0, "You passed a fully sparse expression to `subject_to`. "
    "Make sure the expression has at least one nonzero. "
    "Got " + g.dim(true) + ".");
  casadi_assert(!g.is_constant(), "You passed a constant to `subject_to`. "
    "You need a symbol to form a constraint.");

  // Store the meta-data
  set_meta_con(g, canon_expr(g, linear_scale));
  register_dual(meta_con(g));

  // Recognized options: 'stacktrace' (attached to the constraint and its
  // dual) and 'meta' (user dictionary); anything else is an error
  for (auto && it : options) {
    if (it.first=="stacktrace") {
      meta_con(g).extra["stacktrace"] = it.second.to_dict_vector();
      meta(meta_con(g).dual_canon).extra["stacktrace"] = it.second.to_dict_vector();
    } else if (it.first=="meta") {
      update_user_dict(g, it.second.to_dict());
    } else {
      casadi_error("Unknown option: " + it.first);
    }
  }
}
1223 
  // Drop all constraints, together with any stored dual initials/latest
  // values, and reset the dual counter
  g_.clear();
  store_initial_[OPTI_DUAL_G].clear();
  store_latest_[OPTI_DUAL_G].clear();
  count_dual_ = 0;
}
1231 
1232 std::vector<MX> OptiNode::symvar(const MX& expr, VariableType type) const {
1233  std::vector<MX> ret;
1234  for (const auto& d : symvar(expr)) {
1235  if (meta(d).type==type) ret.push_back(d);
1236  }
1237 
1238  return ret;
1239 }
1240 
1241 void OptiNode::res(const DMDict& res) {
1242  const std::vector<double> & x_v = res.at("x").nonzeros();
1243  for (const auto &v : active_symvar(OPTI_VAR)) {
1244  casadi_int i = meta(v).i;
1245  std::vector<double> & data_v = store_latest_[OPTI_VAR][i].nonzeros();
1246  for (casadi_int i=0;i<data_v.size();++i) {
1247  casadi_int j = meta(v).start+i;
1248  data_v[i] = x_v[j]*linear_scale_[j] + linear_scale_offset_[j];
1249  }
1250  }
1251  if (res.find("lam_g")!=res.end()) {
1252  const std::vector<double> & lam_v = res.at("lam_g").nonzeros();
1253  for (const auto &v : active_symvar(OPTI_DUAL_G)) {
1254  casadi_int i = meta(v).i;
1255  std::vector<double> & data_v = store_latest_[OPTI_DUAL_G][i].nonzeros();
1256  for (casadi_int i=0;i<data_v.size();++i) {
1257  casadi_int j = meta(v).start+i;
1258  j = index_all_to_g_.at(j);
1259  if (j<0) continue;
1260  data_v[i] = lam_v.at(j)/g_linear_scale_.at(j)*f_linear_scale_;
1261  }
1262  }
1263  }
1264  res_ = res;
1265  mark_solved();
1266 }
1267 
1268 bool OptiNode::old_callback() const {
1269  if (callback_.is_null()) return false;
1270  InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
1271  return !cb->associated_with(this);
1272 }
1273 
  // (Re)bake if the problem structure changed since the last bake
  if (problem_dirty()) {
    bake();
  }

  // Verify the constraint types
  for (const auto& g : g_) {
    if (problem_type_!="conic") {
      if (meta_con(g).type==OPTI_PSD)
        casadi_error("Psd constraints not implemented yet. "
        "Perhaps you intended an element-wise inequality? "
        "In that case, make sure that the matrix is flattened (e.g. mat(:)).");
    }
  }

  Dict solver_options_all = solver_options_;

  // Pass the equality/discreteness structure along unless the user
  // already supplied these options explicitly
  if (solver_options_all.find("equality")==solver_options_all.end()) {
    solver_options_all["equality"] = equality_;
  }

  if (solver_options_all.find("discrete")==solver_options_all.end()) {
    solver_options_all["discrete"] = discrete_;
  }

  Dict opts = solver_options_all;

  // Handle callbacks
  if (callback && user_callback_) {
    callback_ = Function::create(new InternalOptiCallback(*this), Dict());
    opts["iteration_callback"] = callback_;
  }

  casadi_assert(!solver_name_.empty(),
    "You must call 'solver' on the Opti stack to select a solver. "
    "Suggestion: opti.solver('ipopt')");

  // Conic problems go through the QP interface, everything else through nlpsol
  if (problem_type_=="conic") {
    return qpsol("solver", solver_name_, nlp_, opts);
  } else {
    return nlpsol("solver", solver_name_, nlp_, opts);
  }

}
1318 
1319 // Solve the problem
1320 OptiSol OptiNode::solve(bool accept_limit) {
1321 
1322  if (problem_dirty()) {
1323  bake();
1324  }
1325 
1326  bool solver_update = solver_dirty() || old_callback() || (user_callback_ && callback_.is_null());
1327 
1328  if (solver_update) {
1329  solver_ = solver_construct(true);
1330  mark_solver_dirty(false);
1331  }
1332 
1333  solve_prepare();
1334  res(solve_actual(arg_));
1335 
1336  std::string ret = return_status();
1337 
1338  casadi_assert(return_success(accept_limit),
1339  "Solver failed. You may use opti.debug.value to investigate the latest values of variables."
1340  " return_status is '" + ret + "'");
1341 
1342  return copy();
1343 }
1344 
1345 // Prepare solver inputs for a solve
1347 
1348 
  // Verify the constraint types
  for (const auto& g : g_) {
    if (meta_con(g).type==OPTI_UNKNOWN)
     casadi_error("Constraint type unknown. Use ==, >= or <= .");
  }

  // Reset the user iteration callback before a fresh solve
  if (user_callback_) {
    InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
    cb->reset();
  }

  // Get initial guess and parameter values
  // The initial guess is expressed in the solver's scaled coordinates
  arg_["x0"] = (veccat(active_values(OPTI_VAR))-linear_scale_offset_)/linear_scale_;
  arg_["p"] = veccat(active_values(OPTI_PAR));
  arg_["lam_g0"] = veccat(active_values(OPTI_DUAL_G));
  // Pinpoint exactly which parameter is unset/NaN for a helpful error message
  if (!arg_["p"].is_regular()) {
    std::vector<MX> s = active_symvar(OPTI_PAR);
    std::vector<DM> v = active_values(OPTI_PAR);
    for (casadi_int i=0;i<s.size();++i) {
      casadi_assert(v[i].is_regular(),
        "You have forgotten to assign a value to a parameter ('set_value'), "
        "or have set it to NaN/Inf:\n" + describe(s[i], 1));
    }
  }


  // Evaluate bounds for given parameter values
  DMDict arg;
  arg["p"] = arg_["p"];
  DMDict res = bounds_(arg);
  arg_["lbg"] = res["lbg"];
  arg_["ubg"] = res["ubg"];

  stats_.clear();
}
1384 
  // Delegate the numerical solve to the constructed solver function
  return solver_(arg);
}
1388 
1389 bool override_num(const std::map<casadi_int, MX> & temp, std::vector<DM>& num, casadi_int i) {
1390  // Override when values are supplied
1391  auto it = temp.find(i);
1392  if (it==temp.end()) {
1393  return true;
1394  } else {
1395  Slice all;
1396  DM t = static_cast<DM>(it->second);
1397  num.back().set(t, false, all, all);
1398  }
1399  return false;
1400 }
1401 
1402 
DM OptiNode::value(const MX& expr, const std::vector<MX>& values, bool scaled) const {
  // Numerically evaluate expr, using stored (or explicitly supplied)
  // values for variables, parameters and dual variables.

  // Build -- or fetch from cache -- a helper Function (x, p, lam) -> expr
  std::shared_ptr<ValueHelper> vh;
  if (!helpers_->incache(expr, vh)) {
    vh = std::make_shared<ValueHelper>();
    std::vector<MX> x = symvar(expr, OPTI_VAR);
    std::vector<MX> p = symvar(expr, OPTI_PAR);
    std::vector<MX> lam = symvar(expr, OPTI_DUAL_G);
    vh->x = x;
    vh->p = p;
    vh->lam = lam;
    vh->helper = Function("helper", std::vector<MX>{veccat(x), veccat(p), veccat(lam)}, {expr});
    if (vh->helper.has_free())
      casadi_error("This expression has symbols that are not defined "
        "within Opti using variable/parameter.");
    helpers_->tocache_if_missing(expr, vh);
  }

  const std::vector<MX>& x = vh->x;
  const std::vector<MX>& p = vh->p;
  const std::vector<MX>& lam = vh->lam;
  const Function& helper = vh->helper;

  // Index the user-supplied overrides (entries of the form const==symbol)
  // by symbol type and per-type index
  std::map<VariableType, std::map<casadi_int, MX> > temp;
  temp[OPTI_DUAL_G] = std::map<casadi_int, MX>();
  for (const auto& v : values) {
    casadi_assert_dev(v.is_op(OP_EQ));
    casadi_int i = meta(v.dep(1)).i;
    casadi_assert_dev(v.dep(0).is_constant());
    temp[meta(v.dep(1)).type][i] = v.dep(0);
  }

  // Assemble numeric values for the variables; track whether any value had
  // to come from the latest-solution store rather than an explicit override
  bool undecided_vars = false;
  std::vector<DM> x_num;
  for (const auto& e : x) {
    casadi_int i = meta(e).i;
    x_num.push_back(store_latest_.at(OPTI_VAR).at(i));
    undecided_vars |= override_num(temp[OPTI_VAR], x_num, i);
    // Optionally express the value in the solver's scaled coordinates
    if (scaled) {
      x_num.back() = x_num.back()/store_linear_scale_.at(OPTI_VAR)[meta(e).i] -
        store_linear_scale_offset_.at(OPTI_VAR)[meta(e).i];
    }
  }

  // Assemble numeric values for the dual variables
  std::vector<DM> lam_num;
  for (const auto& e : lam) {
    casadi_int i = meta(e).i;
    casadi_assert(i<store_latest_.at(OPTI_DUAL_G).size(),
      "This expression has a dual for a constraint that is not given to Opti:\n" +
      describe(e, 1));
    lam_num.push_back(store_latest_.at(OPTI_DUAL_G).at(i));
    undecided_vars |= override_num(temp[OPTI_DUAL_G], lam_num, i);
  }

  // Assemble numeric values for the parameters; these must always be set
  std::vector<DM> p_num;
  for (const auto& e : p) {
    casadi_int i = meta(e).i;
    p_num.push_back(store_initial_.at(OPTI_PAR).at(i));
    override_num(temp[OPTI_PAR], p_num, i);
    casadi_assert(p_num.back().is_regular(),
      "This expression depends on a parameter with unset value:\n"+
      describe(e, 1));
  }

  // Stored values are only meaningful after a solve, and only for symbols
  // that actually entered the solver
  if (undecided_vars) {
    assert_solved();
    for (const auto& e : x)
      casadi_assert(symbol_active_[meta(e).count],
        "This expression has symbols that do not appear in the constraints and objective:\n" +
        describe(e, 1));
    for (const auto& e : lam)
      casadi_assert(symbol_active_[meta(e).count],
        "This expression has a dual for a constraint that is not given to Opti:\n" +
        describe(e, 1));
  }

  std::vector<DM> arg = helper(std::vector<DM>{veccat(x_num), veccat(p_num), veccat(lam_num)});
  return arg[0];
}
1482 
void OptiNode::assert_active_symbol(const MX& m) const {
  // m must be an Opti symbol that actually appears in the baked problem
  assert_has(m);
  assert_baked();
  casadi_assert(symbol_active_[meta(m).count], "Opti symbol is not used in Solver."
    " It does not make sense to assign a value to it:\n" + describe(m, 1));
}
1489 
1490 void OptiNode::set_initial(const std::vector<MX>& assignments) {
1491  for (const auto& v : assignments) {
1492  casadi_assert_dev(v.is_op(OP_EQ));
1493  casadi_assert_dev(v.dep(0).is_constant());
1494  if (has(v.dep(1)))
1495  set_initial(v.dep(1), static_cast<DM>(v.dep(0)));
1496  }
1497 }
1498 
void OptiNode::set_domain(const MX& x, const std::string& domain) {
  // Mark (the primitives of) variable x as real- or integer-valued
  mark_solved(false);
  casadi_assert(x.is_valid_input(), "First argument to set_domain should be a variable.");
  // Translate the domain string into the internal enum
  DomainType type;
  if (domain=="real") {
    type = OPTI_DOMAIN_REAL;
  } else if (domain=="integer") {
    type = OPTI_DOMAIN_INTEGER;
  } else {
    casadi_error("Unknown domain '" + domain + "'. Known values are 'real', 'integer'.");
  }
  // Apply the domain to every symbolic primitive of x
  for (const auto& prim : x.primitives()) {
    MetaVar& m = meta(prim);
    m.domain = type;
  }
}
1516 
1517 void OptiNode::set_value(const std::vector<MX>& assignments) {
1518  for (const auto& v : assignments) {
1519  casadi_assert_dev(v.is_op(OP_EQ));
1520  casadi_assert_dev(v.dep(0).is_constant());
1521  if (has(v.dep(1)))
1522  set_value(v.dep(1), static_cast<DM>(v.dep(0)));
1523  }
1524 }
1525 
1526 void OptiNode::set_value_internal(const MX& x, const DM& v,
1527  std::map< VariableType, std::vector<DM> >& store) {
1528  mark_solved(false);
1529  casadi_assert_dev(v.is_regular());
1530  if (x.is_symbolic()) {
1531  DM& target = store[meta(x).type][meta(x).i];
1532  Slice all;
1533  target.set(v, false, all, all);
1534  return;
1535  }
1536 
1537  // Obtain symbolic primitives
1538  std::vector<MX> symbols = MX::symvar(x);
1539  MX symbols_cat = veccat(symbols);
1540 
1541  std::string failmessage = "You cannot set initial/value of an arbitrary expression. "
1542  "Use symbols or simple mappings of symbols.";
1543 
1544  // Assert x is linear in its symbolic primitives
1545  for (bool b : which_depends(x, symbols_cat, 2, false)) casadi_assert(!b, failmessage);
1546 
1547  // Evaluate jacobian of expr wrt symbols
1548  Dict opts = {{"compact", true}};
1549  Function Jf("Jf", std::vector<MX>{}, std::vector<MX>{jacobian(x, veccat(symbols), opts)});
1550  DM J = Jf(std::vector<DM>{})[0];
1551  Sparsity sp_JT = J.T().sparsity();
1552 
1553  Function Ff("Ff", symbols, {x});
1554  DM E = Ff(std::vector<DM>(symbols.size(), 0))[0];
1555  std::vector<double>& e = E.nonzeros();
1556 
1557  // Cast the v input into the expected sparsity
1558  Slice all;
1559  DM value(x.sparsity());
1560  value.set(v, false, all, all);
1561 
1562  // Purge empty rows
1563  std::vector<casadi_int> filled_rows = sum2(J).get_row();
1564  J = J(filled_rows, all); // NOLINT(cppcoreguidelines-slicing)
1565 
1566  // Get rows and columns of the mapping
1567  std::vector<casadi_int> row, col;
1568  J.sparsity().get_triplet(row, col);
1569  const std::vector<double>& scaling = J.nonzeros();
1570  const std::vector<double>& data_original = value.nonzeros();
1571 
1572  std::vector<double> data; data.reserve(value.nnz());
1573  for (casadi_int i=0;i<value.nnz();++i) {
1574  double v = data_original[i];
1575  casadi_int nz = sp_JT.colind()[i+1]-sp_JT.colind()[i];
1576  casadi_assert(nz<=1, failmessage);
1577  if (nz) {
1578  data.push_back(v);
1579  } else {
1580  casadi_assert(v==e[i], "In initial/value assignment: "
1581  "inconsistent numerical values. At nonzero " + str(i) + ", lhs has "
1582  + str(e[i]) + ", while rhs has " + str(v) + ".");
1583  }
1584  }
1585 
1586  // Contiguous workspace for nonzeros of all involved symbols
1587  std::vector<double> temp(symbols_cat.nnz(), casadi::nan);
1588  for (casadi_int k=0;k<data.size();++k) {
1589  double& lhs = temp[col[k]];
1590  double rhs = data[row[k]]/scaling[row[k]];
1591  if (std::isnan(lhs)) {
1592  // Assign in the workspace
1593  lhs = rhs;
1594  } else {
1595  casadi_assert(lhs==rhs, "Initial/value assignment with mapping is ambiguous.");
1596  }
1597  }
1598 
1599  casadi_int offset = 0;
1600  for (const auto & s : symbols) {
1601  DM& target = store[meta(s).type][meta(s).i];
1602  std::vector<double>& data = target.nonzeros();
1603  // Loop over nonzeros in each symbol
1604  for (casadi_int i=0;i<s.nnz();++i) {
1605  // Copy from the workspace (barring fields that were not set)
1606  double v = temp[offset+i];
1607  if (!std::isnan(v)) data[i] = v;
1608  }
1609  offset+=s.nnz();
1610  }
1611 
1612 }
1613 
void OptiNode::set_initial(const MX& x, const DM& v) {
  // Set the initial guess for the variable(s) in x; parameters are rejected
  for (const auto & s : MX::symvar(x))
    casadi_assert(meta(s).type!=OPTI_PAR,
      "You cannot set an initial value for a parameter. Did you mean 'set_value'?");
  set_value_internal(x, v, store_initial_);
}
1620 
void OptiNode::set_value(const MX& x, const DM& v) {
  // Set the value of the parameter(s) in x; decision variables are rejected.
  // Parameter values are kept in the same store as initial guesses.
  for (const auto & s : MX::symvar(x))
    casadi_assert(meta(s).type!=OPTI_VAR,
      "You cannot set a value for a variable. Did you mean 'set_initial'?");
  set_value_internal(x, v, store_initial_);
}
1627 
1628 void OptiNode::set_linear_scale(const MX& x, const DM& scale, const DM& offset) {
1629  for (const auto & s : MX::symvar(x))
1630  casadi_assert(meta(s).type!=OPTI_PAR,
1631  "You cannot set a scale value for a parameter.");
1632  casadi_assert(scale.is_scalar() || scale.size()==x.size(),
1633  "Dimension mismatch in linear_scale. Expected " + x.dim() + ", got " + scale.dim()+ ".");
1634  set_value_internal(x, scale, store_linear_scale_);
1635  casadi_assert(offset.is_scalar() || offset.size()==x.size(),
1636  "Dimension mismatch in linear_scale offset. Expected " + x.dim() +
1637  ", got " + scale.dim()+ ".");
1638  set_value_internal(x, offset, store_linear_scale_offset_);
1639 }
1640 
1641 std::vector<MX> OptiNode::active_symvar(VariableType type) const {
1642  if (symbol_active_.empty()) return std::vector<MX>{};
1643  std::vector<MX> ret;
1644  for (const auto& s : symbols_) {
1645  if (symbol_active_[meta(s).count] && meta(s).type==type)
1646  ret.push_back(s);
1647  }
1648  return ret;
1649 }
1650 
1651 std::vector<MX> OptiNode::symvar(VariableType type) const {
1652  if (symbols_.empty()) return std::vector<MX>{};
1653  std::vector<MX> ret;
1654  for (const auto& s : symbols_) {
1655  if (meta(s).type==type)
1656  ret.push_back(s);
1657  }
1658  return ret;
1659 }
1660 
std::vector<DM> OptiNode::active_values(VariableType type) const {
  // Default to the initial-value store
  return active_values(type, store_initial_);
}
1664 
  const std::map< VariableType, std::vector<DM> >& store) const {
  // No active-flags recorded yet: nothing to return
  if (symbol_active_.empty()) return std::vector<DM>{};
  std::vector<DM> ret;
  // Collect stored values of all active symbols of the requested type,
  // in declaration order
  for (const auto& s : symbols_) {
    if (symbol_active_[meta(s).count] && meta(s).type==type) {
      ret.push_back(store.at(meta(s).type)[meta(s).i]);
    }
  }
  return ret;
}
1676 
Function OptiNode::to_function(const std::string& name,
    const std::vector<MX>& args, const std::vector<MX>& res,
    const std::vector<std::string>& name_in,
    const std::vector<std::string>& name_out,
    const Dict& opts) {
  // Wrap a solve as a CasADi Function mapping args -> res
  if (problem_dirty()) return baked_copy().to_function(name, args, res, name_in, name_out, opts);

  // Solver without an iteration callback, to be embedded symbolically
  Function solver = solver_construct(false);

  // Get initial guess and parameter values
  // NOTE(review): x0, p and lam_g are indexed with .at(i) below, so they are
  // presumably pre-sized to the number of active symbols in code not shown
  // in this excerpt -- confirm against the full source.
  std::vector<MX> x0, p, lam_g;

  casadi_int k = 0;
  // Scatter the primitives of each argument into the x0/p/lam_g slots
  for (const auto& a : args) {
    casadi_assert(a.is_valid_input(), "Argument " + str(k) + " is not purely symbolic.");
    k++;
    for (const auto& prim : a.primitives()) {
      if (!symbol_active_[meta(prim).count]) continue;
      casadi_int i = meta(prim).active_i;
      if (meta(prim).type==OPTI_VAR) {
        x0.at(i) = prim;
      } else if (meta(prim).type==OPTI_PAR) {
        p.at(i) = prim;
      } else if (meta(prim).type==OPTI_DUAL_G) {
        lam_g.at(i) = prim;
      } else {
        casadi_error("Unknown");
      }
    }
  }
  MXDict arg;
  arg["p"] = veccat(p);

  // Evaluate bounds for given parameter values
  MXDict r = bounds_(arg);
  arg["x0"] = veccat(x0);
  arg["lam_g0"] = veccat(lam_g);
  arg["lbg"] = r["lbg"];
  arg["ubg"] = r["ubg"];

  // Symbolic call to the solver
  r = solver(arg);

  // Map the solver outputs back onto the requested result expressions
  std::vector<MX> helper_in = {veccat(active_symvar(OPTI_VAR)),
    veccat(active_symvar(OPTI_PAR)),
    veccat(active_symvar(OPTI_DUAL_G))};
  Function helper("helper", helper_in, {res});

  std::vector<MX> arg_in = helper(std::vector<MX>{r.at("x"), arg["p"], r.at("lam_g")});

  return Function(name, args, arg_in, name_in, name_out, opts);

}
1732 
void OptiNode::disp(std::ostream &stream, bool more) const {
  // No-op: this override intentionally prints nothing
}
1736 
casadi_int OptiNode::instance_number() const {
  // Numeric identifier of this Opti instance
  return instance_number_;
}
1740 
void OptiNode::show_infeasibilities(double tol, const Dict& opts) const {
  // Print, for every constraint violated by at least tol, its bounds,
  // value, violation (raw and scaled) and a human-readable description.
  stats();
  std::vector<double> lbg_ = value(lbg()).get_elements();
  std::vector<double> ubg_ = value(ubg()).get_elements();
  // Scaled counterparts, evaluated in the solver's scaled coordinates
  std::vector<double> g_scaled_ = value(nlp_.at("g"), std::vector<MX>(), true).get_elements();
  std::vector<double> lbg_scaled_ = value(bounds_lbg_, std::vector<MX>(), true).get_elements();
  std::vector<double> ubg_scaled_ = value(bounds_ubg_, std::vector<MX>(), true).get_elements();


  std::vector<double> g_ = value(g()).get_elements();
  uout() << "Violated constraints (tol " << tol << "), in order of declaration:" << std::endl;

  for (casadi_int i=0;i<g_.size();++i) {
    // Positive err means the constraint is violated on one side or the other
    double err = std::max(g_[i]-ubg_[i], lbg_[i]-g_[i]);
    double err_scaled = std::max(g_scaled_[i]-ubg_scaled_[i], lbg_scaled_[i]-g_scaled_[i]);
    if (err>=tol) {
      uout() << "------- i = " << i+GlobalOptions::start_index;
      uout() << "/" << g_.size();

      // When constraints were reduced, point at the reduced bound/constraint
      if (reduced_) {
        if (is_simple_[i]) {
          // Use 0- or 1-based indexing to match the user's convention
          if (GlobalOptions::start_index==0) {
            uout() << " reduced to bound on x[" << g_index_reduce_x_.at(i) << "]";
          } else {
            uout() << " reduced to bound on x(" << g_index_reduce_x_.at(i)+1 << ")";
          }
        } else {
          uout() << " reduced to g[" << g_index_reduce_g_.at(i) << "]";
        }
      }

      uout() << " ------ " << std::endl;
      uout() << lbg_[i] << " <= " << g_[i] << " <= " << ubg_[i];
      uout() << " (viol " << err << ")" << std::endl;
      // Also show the scaled view when it differs from the raw one
      if (g_[i]!=g_scaled_[i]) {
        uout() << lbg_scaled_[i] << " <= " << g_scaled_[i] << " <= " << ubg_scaled_[i];
        uout() << " (scaled) (viol " << err_scaled << ")" << std::endl;
      }
      uout() << g_describe(i, opts) << std::endl;
    }
  }
}
1783 
1784 } // namespace casadi
static std::unique_ptr< std::istream > ifstream_ptr(const std::string &path, std::ios_base::openmode mode=std::ios_base::in, bool fail=true)
Definition: filesystem.cpp:135
FunctionInternal(const std::string &name)
Constructor.
Function object.
Definition: function.hpp:60
FunctionInternal * get() const
Definition: function.cpp:353
const std::vector< std::string > & name_in() const
Get input scheme.
Definition: function.cpp:961
const std::string & name() const
Name of the function.
Definition: function.cpp:1315
static Function create(FunctionInternal *node)
Create from node.
Definition: function.cpp:336
Dict stats(int mem=0) const
Get all statistics obtained at the end of the last evaluate call.
Definition: function.cpp:928
const std::vector< std::string > & name_out() const
Get output scheme.
Definition: function.cpp:965
casadi_int numel() const
Get the number of elements.
bool is_dense() const
Check if the matrix expression is dense.
bool is_empty(bool both=false) const
Check if the sparsity is empty, i.e. if one of the dimensions is zero.
std::pair< casadi_int, casadi_int > size() const
Get the shape.
bool is_vector() const
Check if the matrix is a row or column vector.
casadi_int nnz() const
Get the number of (structural) non-zero elements.
casadi_int size2() const
Get the second dimension (i.e. number of columns)
casadi_int size1() const
Get the first dimension (i.e. number of rows)
std::string dim(bool with_nz=false) const
Get string representation of dimensions.
static MatType ones(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries one.
static MX sym(const std::string &name, casadi_int nrow=1, casadi_int ncol=1)
Create an nrow-by-ncol symbolic primitive.
const casadi_int * colind() const
Get the sparsity pattern. See the Sparsity class for details.
static MatType zeros(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries zero.
bool is_scalar(bool scalar_and_dense=false) const
Check if the matrix expression is scalar.
bool is_null() const
Is a null pointer?
static casadi_int start_index
MX - Matrix expression.
Definition: mx.hpp:92
bool is_valid_input() const
Check if matrix can be used to define function inputs.
Definition: mx.cpp:925
const Sparsity & sparsity() const
Get the sparsity pattern.
Definition: mx.cpp:592
std::string name() const
Get the name.
Definition: mx.cpp:762
static std::vector< MX > symvar(const MX &x)
Definition: mx.cpp:1938
bool is_constant() const
Check if constant.
Definition: mx.cpp:770
static MX eye(casadi_int n)
Identity matrix.
Definition: mx.cpp:580
MXNode * get() const
Get a const pointer to the node.
Definition: mx.cpp:544
bool is_op(casadi_int op) const
Is it a certain operation.
Definition: mx.cpp:794
std::vector< MX > primitives() const
Get primitives.
Definition: mx.cpp:933
MX dep(casadi_int ch=0) const
Get the nth dependency as MX.
Definition: mx.cpp:754
bool is_symbolic() const
Check if symbolic.
Definition: mx.cpp:766
std::vector< Scalar > & nonzeros()
Matrix< Scalar > T() const
Transpose the matrix.
const Sparsity & sparsity() const
Const access the sparsity - reference to data member.
void set(const Matrix< Scalar > &m, bool ind1, const Slice &rr)
static Matrix< double > nan(const Sparsity &sp)
create a matrix with all nan
bool is_regular() const
Checks if expression does not contain NaN or Inf.
std::vector< Scalar > get_elements() const
Get all elements.
MX x_lookup(casadi_index i) const
Definition: optistack.cpp:605
std::string describe(const MX &x, casadi_index indent=0, const Dict &opts=Dict()) const
Definition: optistack.cpp:659
MX g_lookup(casadi_index i) const
Definition: optistack.cpp:637
std::string x_describe(casadi_index i, const Dict &opts=Dict()) const
Definition: optistack.cpp:645
std::string g_describe(casadi_index i, const Dict &opts=Dict()) const
Definition: optistack.cpp:652
A simplified interface for NLP modeling/solving.
Function solver_construct(bool callback=true)
Dict user_dict(const MX &m) const
casadi_int g_index_reduce_x(casadi_int i) const
std::vector< MX > value_variables() const
get assignment expressions for latest values
std::vector< MX > initial() const
get assignment expressions for initial values
std::vector< MX > active_symvar(VariableType type) const
std::string describe(const MX &x, casadi_int indent=0, const Dict &opts=Dict()) const
MX g() const
Get all (scalarised) constraint expressions as a column vector.
MetaCon get_meta_con(const MX &m) const
Get meta-data of symbol (for internal use only)
OptiAdvanced baked_copy() const
MX dual(const MX &m) const
get the dual variable
std::vector< DM > active_values(VariableType type) const
MX x() const
Get all (scalarised) decision variables as a symbolic column vector.
void minimize(const MX &f, double linear_scale=1)
Set objective.
casadi_int nx() const
Number of (scalarised) decision variables.
MX variable(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a decision variable (symbol)
std::string x_describe(casadi_int i, const Dict &opts=Dict()) const
void bake()
Fix the structure of the optimization problem.
casadi_int instance_number() const
DM value(const MX &x, const std::vector< MX > &values=std::vector< MX >(), bool scaled=false) const
void disp(std::ostream &stream, bool more=false) const override
Print representation.
void set_domain(const MX &x, const std::string &domain)
Set domain of variable.
OptiSol solve(bool accept_limit)
Crunch the numbers; solve the problem.
casadi_int np() const
Number of (scalarised) parameters.
bool has_callback_class() const
MX g_lookup(casadi_int i) const
casadi_int ng() const
Number of (scalarised) constraints.
MetaVar get_meta(const MX &m) const
Get meta-data of symbol (for internal use only)
void subject_to()
Clear constraints.
std::vector< MX > symvar() const
void update_user_dict(const MX &m, const Dict &meta)
add meta-data of an expression
void set_meta(const MX &m, const MetaVar &meta)
Set meta-data of an expression.
MX parameter(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a parameter (symbol); fixed during optimization.
bool is_parametric(const MX &expr) const
return true if expression is only dependant on Opti parameters, not variables
void assert_active_symbol(const MX &m) const
bool return_success(bool accept_limit) const
Did the solver return successfully?
std::vector< MX > value_parameters() const
bool problem_dirty() const
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const std::vector< std::string > &name_in, const std::vector< std::string > &name_out, const Dict &opts)
Create a CasADi Function from the Opti solver.
Opti copy() const
Copy.
DM x_linear_scale_offset() const
MX x_lookup(casadi_int i) const
casadi_int g_index_reduce_g(casadi_int i) const
void mark_problem_dirty(bool flag=true)
OptiNode(const std::string &problem_type)
Create Opti Context.
std::string return_status() const
Get return status of solver.
void set_initial(const MX &x, const DM &v)
void mark_solver_dirty(bool flag=true)
MX p() const
Get all (scalarised) parameters as a symbolic column vector.
casadi_int g_index_unreduce_g(casadi_int i) const
void set_value(const MX &x, const DM &v)
Set value of parameter.
double f_linear_scale() const
MX lam_g() const
Get dual variables as a symbolic column vector.
void set_linear_scale(const MX &x, const DM &scale, const DM &offset)
Set scale of a decision variable.
std::string g_describe(casadi_int i, const Dict &opts=Dict()) const
friend class InternalOptiCallback
void solver(const std::string &solver, const Dict &plugin_options=Dict(), const Dict &solver_options=Dict())
Solver.
MX f() const
Get objective expression.
void mark_solved(bool flag=true)
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Function casadi_solver() const
Get the underlying CasADi solver of the Opti stack.
static OptiNode * create(const std::string &problem_type)
MetaCon canon_expr(const MX &expr, const DM &linear_scale=1) const
Interpret an expression (for internal use only)
void show_infeasibilities(double tol=0, const Dict &opts=Dict()) const
DMDict solve_actual(const DMDict &args)
Dict stats() const
Get statistics.
void set_meta_con(const MX &m, const MetaCon &meta)
Set meta-data of an expression.
A simplified interface for NLP modeling/solving.
Definition: optistack.hpp:662
A simplified interface for NLP modeling/solving.
Definition: optistack.hpp:90
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Definition: optistack.cpp:254
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const Dict &opts=Dict())
Create a CasADi Function from the Opti solver.
Definition: optistack.cpp:435
static Opti create(OptiNode *node)
Definition: optistack.cpp:86
void clear_mem()
Clear all memory (called from destructor)
Class representing a Slice.
Definition: slice.hpp:48
General sparsity class.
Definition: sparsity.hpp:106
casadi_int size1() const
Get the number of rows.
Definition: sparsity.cpp:124
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern *.
Definition: sparsity.cpp:1012
casadi_int nnz() const
Get the number of (structural) non-zeros.
Definition: sparsity.cpp:148
casadi_int size2() const
Get the number of columns.
Definition: sparsity.cpp:128
static Sparsity lower(casadi_int n)
Create a lower triangular square sparsity pattern *.
Definition: sparsity.cpp:1049
Function qpsol(const std::string &name, const std::string &solver, const SXDict &qp, const Dict &opts)
Definition: conic.cpp:261
Function nlpsol(const std::string &name, const std::string &solver, const SXDict &nlp, const Dict &opts)
Definition: nlpsol.cpp:118
casadi_int nlpsol_n_out()
Number of NLP solver outputs.
Definition: nlpsol.cpp:263
std::vector< std::string > nlpsol_out()
Get NLP solver output scheme of NLP solvers.
Definition: nlpsol.cpp:206
The casadi namespace.
Definition: archiver.cpp:28
std::map< std::string, MX > MXDict
Definition: mx.hpp:1009
bool override_num(const std::map< casadi_int, MX > &temp, std::vector< DM > &num, casadi_int i)
T get_from_dict(const std::map< std::string, T > &d, const std::string &key, const T &default_value)
std::string description(Category v)
double if_else_zero(double x, double y)
Conditional assignment.
Definition: calculus.hpp:289
void assign_vector(const std::vector< S > &s, std::vector< D > &d)
@ OPTI_VAR
Definition: optistack.hpp:496
@ OPTI_DUAL_G
Definition: optistack.hpp:498
@ OPTI_PAR
Definition: optistack.hpp:497
double sign(double x)
Sign function, note that sign(nan) == nan.
Definition: calculus.hpp:264
std::vector< casadi_int > find(const std::vector< T > &v)
find nonzeros
@ OPTI_DOMAIN_INTEGER
Definition: optistack.hpp:502
@ OPTI_DOMAIN_REAL
Definition: optistack.hpp:501
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
const double inf
infinity
Definition: calculus.hpp:50
bool any(const std::vector< bool > &v)
Check if any arguments are true.
Definition: casadi_misc.cpp:84
@ OPTI_DOUBLE_INEQUALITY
Definition: optistack.hpp:492
@ OPTI_INEQUALITY
Definition: optistack.hpp:491
@ OPTI_GENERIC_INEQUALITY
Definition: optistack.hpp:489
@ OPTI_EQUALITY
Definition: optistack.hpp:490
@ OPTI_GENERIC_EQUALITY
Definition: optistack.hpp:488
@ OPTI_UNKNOWN
Definition: optistack.hpp:494
@ OPTI_PSD
Definition: optistack.hpp:493
const double nan
Not a number.
Definition: calculus.hpp:53
bool all(const std::vector< bool > &v)
Check if all arguments are true.
Definition: casadi_misc.cpp:77
Matrix< double > DM
Definition: dm_fwd.hpp:33
bool is_regular(const std::vector< T > &v)
Checks if array does not contain NaN or Inf.
std::ostream & uout()
std::map< std::string, DM > DMDict
Definition: dm_fwd.hpp:36
@ OP_LT
Definition: calculus.hpp:70
@ OP_EQ
Definition: calculus.hpp:70
@ OP_NORM2
Definition: calculus.hpp:178
@ OP_LE
Definition: calculus.hpp:70
@ OP_NORMF
Definition: calculus.hpp:178
std::string filename(const std::string &path)
Definition: ghc.cpp:55
ConstraintType type
Definition: optistack.hpp:515
casadi_int n
Definition: optistack.hpp:518
casadi_int active_i
Definition: optistack.hpp:533
std::string attribute
Definition: optistack.hpp:526
DomainType domain
Definition: optistack.hpp:530
VariableType type
Definition: optistack.hpp:529
casadi_int count
Definition: optistack.hpp:531
casadi_int i
Definition: optistack.hpp:532
casadi_int m
Definition: optistack.hpp:528
casadi_int n
Definition: optistack.hpp:527