/*
 * This file is part of CasADi.
 *
 * CasADi -- A symbolic framework for dynamic optimization.
 * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
 * KU Leuven. All rights reserved.
 * Copyright (C) 2011-2014 Greg Horn
 *
 * CasADi is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * CasADi is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with CasADi; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "optistack_internal.hpp"
#include "nlpsol.hpp"
#include "conic.hpp"
#include "function_internal.hpp"
#include "global_options.hpp"

namespace casadi {

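// Internal adapter that exposes a user-supplied OptiCallback as a CasADi
// Function, so it can be handed to the NLP solver as 'iteration_callback'.
// Its inputs mirror the nlpsol outputs (x, f, g, lam_x, lam_g, lam_p);
// each call pushes the current iterate into the owning OptiNode and
// forwards an iteration counter to the user callback.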
class InternalOptiCallback : public FunctionInternal {
 public:

  InternalOptiCallback(OptiNode& sol) : FunctionInternal(class_name()), sol_(sol) {}

  ~InternalOptiCallback() override {
    clear_mem();
  }

  std::string class_name() const override { return "InternalOptiCallback"; }

  // Number of inputs and outputs
  size_t get_n_in() override { return nlpsol_n_out(); }

  Sparsity get_sparsity_in(casadi_int i) override {
    std::string n = nlpsol_out(i);
    casadi_int size = 0;
    if (n=="f") {
      size = 1;
    } else if (n=="lam_x" || n=="x") {
      size = sol_.nx();
    } else if (n=="lam_g" || n=="g") {
      size = sol_.ng();
    } else if (n=="p" || n=="lam_p") {
      size = sol_.np();
      if (size==0) return Sparsity::dense(0, 0);
    } else {
      return Sparsity::dense(0, 0);
    }
    return Sparsity::dense(size, 1);
  }

  void reset() { i = 0; }

  // eval_dm has been defined instead of eval
  bool has_eval_dm() const override { return true; }

  std::vector<DM> eval_dm(const std::vector<DM>& arg) const override {
    DMDict r;

    for (casadi_int i=0; i<nlpsol_n_out(); ++i) {
      r[nlpsol_out(i)] = arg[i];
    }

    sol_.res(r);

    if (sol_.user_callback_) sol_.user_callback_->call(i);

    i += 1;
    return {0};
  }

  bool associated_with(const OptiNode* o) { return &sol_==o; }

 private:
  OptiNode& sol_;
  mutable casadi_int i;
};

OptiNode* OptiNode::create(const std::string& problem_type) {
  return new OptiNode(problem_type);
}

void OptiNode::callback_class(OptiCallback* callback) {
  user_callback_ = callback;
}

void OptiNode::callback_class() {
  user_callback_ = nullptr;
}

bool OptiNode::has_callback_class() const {
  return user_callback_ != nullptr;
}

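// Render one stacktrace entry ("file", "line", "name") as a human-readable
// string, quoting the offending source line when the file can be read.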
std::string OptiNode::format_stacktrace(const Dict& stacktrace, casadi_int indent) {
  std::string s_indent;
  for (casadi_int i=0; i<indent; ++i) {
    s_indent += "  ";
  }
  std::string description;
  std::string filename = stacktrace.at("file").as_string();
  casadi_int line = stacktrace.at("line").as_int();
  description += "called from " + filename + ":" + str(line);
  std::string name = stacktrace.at("name").as_string();
  if (name!="Unknown" && name!="<module>")
    description += " in " + stacktrace.at("name").as_string();
  try {
    std::ifstream file(filename);
    for (casadi_int i=0; i<line-1; ++i) {
      file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
    }
    std::string contents; std::getline(file, contents);
    auto it = contents.find_first_not_of(" \n");
    if (it!=std::string::npos) {
      description += "\n" + s_indent + contents.substr(it);
    }
  } catch(...) {
    // pass
  }
  return description;
}

std::string OptiNode::describe(const MX& expr, casadi_int indent, const Dict& opts) const {
  if (problem_dirty()) return baked_copy().describe(expr, indent, opts);
  casadi_int max_stacktrace_depth = 1;
  for (const auto& op : opts) {
    if (op.first=="max_stacktrace_depth") {
      max_stacktrace_depth = op.second.as_int();
    } else {
      casadi_warning("Unknown option '" + op.first + "'");
    }
  }
  std::string s_indent;
  for (casadi_int i=0; i<indent; ++i) {
    s_indent += "  ";
  }
  std::string description = s_indent;
  if (expr.is_symbolic()) {
    if (has(expr)) {
      description += "Opti " + variable_type_to_string(meta(expr).type) + " '" + expr.name() +
        "' of shape " + expr.dim();
      const Dict& extra = meta(expr).extra;
      auto it = extra.find("stacktrace");
      if (it!=extra.end()) {
        for (const Dict& stacktrace : it->second.as_dict_vector()) {
          description += ", " + format_stacktrace(stacktrace, indent+1);
          if (--max_stacktrace_depth==0) break;
        }
      }
    } else {
      VariableType vt;
      if (parse_opti_name(expr.name(), vt)) {
        description += "Opti " + variable_type_to_string(vt) + " '" + expr.name() +
          "' of shape " + expr.dim() +
          ", belonging to a different instance of Opti.";
      } else {
        description += "MX symbol '" + expr.name() + "' of shape " + expr.dim();
        description += ", declared outside of Opti.";
      }
    }
  } else {
    if (has_con(expr)) {
      description = "Opti constraint of shape " + expr.dim();
      const Dict& extra = meta_con(expr).extra;
      auto it = extra.find("stacktrace");
      if (it!=extra.end()) {
        for (const Dict& stacktrace : it->second.as_dict_vector()) {
          description += ", " + format_stacktrace(stacktrace, indent+1);
          if (--max_stacktrace_depth==0) break;
        }
      }
    } else {
      std::vector<MX> s = symvar(expr);
      if (s.empty()) {
        description += "Constant expression.";
      } else {
        description += "General expression, dependent on " + str(s.size()) + " symbols:";
        for (casadi_int i=0; i<s.size(); ++i) {
          description += "\n" + describe(s[i], indent+1);
          if (i>5) {
            description += "\n...";
            break;
          }
        }
      }
    }
  }

  return description;
}

std::string OptiNode::g_describe(casadi_int i, const Dict& opts) const {
  if (problem_dirty()) return baked_copy().g_describe(i, opts);
  MX expr = g_lookup(i);
  casadi_int local_i = i - meta_con(expr).start + GlobalOptions::start_index;
  std::string description = describe(expr, 0, opts);
  if (expr.numel()>1)
    description += "\nAt nonzero " + str(local_i % expr.numel()) +
      ", part " + str((casadi_int) local_i / expr.numel()) + ".";
  return description;
}

std::string OptiNode::x_describe(casadi_int i, const Dict& opts) const {
  if (problem_dirty()) return baked_copy().x_describe(i, opts);
  MX symbol = x_lookup(i);
  casadi_int local_i = i - meta(symbol).start + GlobalOptions::start_index;
  std::string description = describe(symbol, 0, opts);
  if (symbol.numel()>1)
    description += "\nAt nonzero " + str(local_i) + ".";
  return description;
}

MX OptiNode::x_lookup(casadi_int i) const {
  if (problem_dirty()) return baked_copy().x_lookup(i);
  casadi_assert_dev(i>=0);
  casadi_assert_dev(i<nx());
  std::vector<MX> x = active_symvar(OPTI_VAR);
  for (const auto& e : x) {
    const MetaVar& m = meta(e);
    if (i>=m.start && i<m.stop) return e;
  }
  casadi_error("Internal error");
  return MX();
}

MX OptiNode::g_lookup(casadi_int i) const {
  if (problem_dirty()) return baked_copy().g_lookup(i);
  casadi_assert_dev(i>=0);
  casadi_assert_dev(i<ng());
  for (const auto& e : g_) {
    const MetaCon& m = meta_con(e);
    if (i>=m.start && i<m.stop) return e;
  }
  casadi_error("Internal error");
  return MX();
}

OptiNode::OptiNode(const std::string& problem_type) :
    count_(0), count_var_(0), count_par_(0), count_dual_(0) {
  f_ = 0;
  f_linear_scale_ = 1;
  instance_number_ = instance_count_++;
  user_callback_ = nullptr;
  store_initial_[OPTI_VAR] = {};
  store_initial_[OPTI_PAR] = {};
  store_initial_[OPTI_DUAL_G] = {};
  store_latest_[OPTI_VAR] = {};
  store_latest_[OPTI_DUAL_G] = {};
  casadi_assert(problem_type=="nlp" || problem_type=="conic",
    "Specified problem type '" + problem_type + "' unknown. "
    "Choose 'nlp' (default) or 'conic'.");
  problem_type_ = problem_type;
}

OptiNode::~OptiNode() {
}

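// Typical use through the Opti facade (illustrative sketch, not from this file):
//   Opti opti;
//   MX x = opti.variable();                    // scalar decision variable
//   MX P = opti.variable(3, 3, "symmetric");   // 3x3 symmetric matrix variable
// A 'symmetric' n-by-n variable is backed by only the n*(n+1)/2 lower-triangular
// entries and expanded with tril2symm, so just the independent entries are decided.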
MX OptiNode::variable(casadi_int n, casadi_int m, const std::string& attribute) {

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = n;
  meta_data.m = m;
  meta_data.type = OPTI_VAR;
  meta_data.count = count_++;
  meta_data.i = count_var_++;
  meta_data.domain = OPTI_DOMAIN_REAL;

  MX symbol, ret;

  if (attribute=="symmetric") {
    casadi_assert(n==m, "You specified attribute 'symmetric', "
      "while matrix is not even square, but " + str(n) + "-by-" + str(m) + ".");
    symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n*(n+1)/2);
    ret = tril2symm(MX(Sparsity::lower(n), symbol));
  } else if (attribute=="full") {
    symbol = MX::sym(name_prefix() + "x_" + str(count_var_), n, m);
    ret = symbol;
  } else {
    casadi_error("Unknown attribute '" + attribute + "'. Choose from 'full' or 'symmetric'.");
  }

  // Store the symbol, preventing it from going out of scope
  symbols_.push_back(symbol);
  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return ret;
}

MX OptiNode::variable(const MX& symbol, const std::string& attribute) {

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = symbol.size1();
  meta_data.m = symbol.size2();
  meta_data.type = OPTI_VAR;
  meta_data.count = count_++;
  meta_data.i = count_var_++;

  // Store the symbol, preventing it from going out of scope
  symbols_.push_back(symbol);
  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}

MX OptiNode::variable(const Sparsity& sp, const std::string& attribute) {

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = sp.size1();
  meta_data.m = sp.size2();
  meta_data.type = OPTI_VAR;
  meta_data.count = count_++;
  meta_data.i = count_var_++;

  MX symbol = MX::sym(name_prefix() + "x_" + str(count_var_), sp);

  // Store the symbol, preventing it from going out of scope
  symbols_.push_back(symbol);
  store_initial_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));
  store_latest_[OPTI_VAR].push_back(DM::nan(symbol.sparsity()));
  store_linear_scale_[OPTI_VAR].push_back(DM::ones(symbol.sparsity()));
  store_linear_scale_offset_[OPTI_VAR].push_back(DM::zeros(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}

std::string OptiNode::name_prefix() const {
  return "opti" + str(instance_number_) + "_";
}

Opti OptiNode::copy() const {
  return Opti::create(new OptiNode(*this));
}

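// Create and attach the dual (multiplier) symbol for a constraint. For
// double inequalities the sign of the multiplier selects the active side;
// PSD constraints get no dual symbol here.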
void OptiNode::register_dual(MetaCon& c) {

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = "full";
  meta_data.n = c.canon.size1();
  meta_data.m = c.canon.size2();
  meta_data.type = OPTI_DUAL_G;
  meta_data.count = count_++;
  meta_data.i = count_dual_++;

  MX symbol, ret;
  if (c.type==OPTI_PSD) {
    symbol = MX();
    ret = MX();
  } else {
    symbol = MX::sym(name_prefix() + "lam_g_" + str(count_dual_), c.canon.sparsity());

    casadi_assert_dev(c.canon.is_dense());

    Sparsity ret_sp = repmat(c.original.sparsity(), 1, c.n);

    casadi_int N = c.canon.sparsity().nnz();

    MX flat = vec(symbol);
    if (c.type==OPTI_DOUBLE_INEQUALITY) {
      MX v = MX::sym("v");
      MX decide_left_right = vertcat(if_else_zero(v<0, -v), if_else_zero(v>=0, v));
      Function sign = Function("sign", {v}, {decide_left_right});
      Function sign_map = sign.map(c.canon.sparsity().nnz());
      ret = MX(ret_sp, sign_map((c.flipped ? -1 : 1)*flat)[0].T());
    } else {
      casadi_int block_size = N / c.n;
      std::vector<MX> original_blocks = vertsplit(fabs(flat), block_size);
      std::vector<MX> blocks(N);
      for (casadi_int i=0; i<c.n; ++i) {
        casadi_int p = c.flipped ? c.n-i-1 : i;
        blocks[p] = original_blocks[i];
      }
      ret = MX(ret_sp, vertcat(blocks));
    }
  }

  symbols_.push_back(symbol);
  store_initial_[OPTI_DUAL_G].push_back(DM::zeros(symbol.sparsity()));
  store_latest_[OPTI_DUAL_G].push_back(DM::nan(symbol.sparsity()));

  c.dual = ret;
  c.dual_canon = symbol;

  set_meta(symbol, meta_data);
}

MX OptiNode::parameter(const MX& symbol, const std::string& attribute) {
  casadi_assert_dev(attribute=="full");

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = symbol.size1();
  meta_data.m = symbol.size2();
  meta_data.type = OPTI_PAR;
  meta_data.count = count_++;
  meta_data.i = count_par_++;

  symbols_.push_back(symbol);
  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}

MX OptiNode::parameter(const Sparsity& sp, const std::string& attribute) {
  casadi_assert_dev(attribute=="full");

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = sp.size1();
  meta_data.m = sp.size2();
  meta_data.type = OPTI_PAR;
  meta_data.count = count_++;
  meta_data.i = count_par_++;

  MX symbol = MX::sym(name_prefix() + "p_" + str(count_par_), sp);
  symbols_.push_back(symbol);
  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}

MX OptiNode::parameter(casadi_int n, casadi_int m, const std::string& attribute) {
  casadi_assert_dev(attribute=="full");

  // Prepare metadata
  MetaVar meta_data;
  meta_data.attribute = attribute;
  meta_data.n = n;
  meta_data.m = m;
  meta_data.type = OPTI_PAR;
  meta_data.count = count_++;
  meta_data.i = count_par_++;

  MX symbol = MX::sym(name_prefix() + "p_" + str(count_par_), n, m);
  symbols_.push_back(symbol);
  store_initial_[OPTI_PAR].push_back(DM::nan(symbol.sparsity()));

  set_meta(symbol, meta_data);
  return symbol;
}

Dict OptiNode::stats() const {
  assert_solved();
  if (stats_.empty()) {
    stats_ = solver_.stats();
    is_simple_ = get_from_dict(stats_,
      "detect_simple_bounds_is_simple",
      std::vector<bool>(ng(), false));
    target_x_ = get_from_dict(stats_,
      "detect_simple_bounds_target_x",
      std::vector<casadi_int>{});
    casadi_assert_dev(is_simple_.size()==ng());

    g_index_reduce_g_.resize(ng());
    g_index_reduce_x_.resize(ng(), -1);
    g_index_unreduce_g_.resize(ng(), -1);

    casadi_int* target_x_ptr = target_x_.data();

    casadi_int k=0;
    for (casadi_int i=0; i<is_simple_.size(); ++i) {
      if (!is_simple_[i]) {
        g_index_reduce_g_[i] = k;
        g_index_unreduce_g_[k] = i;
        k++;
      } else {
        g_index_reduce_g_[i] = -1;
        g_index_reduce_x_[i] = *target_x_ptr;
        target_x_ptr++;
      }
    }

    reduced_ = any(is_simple_);
  }
  return stats_;
}

casadi_int OptiNode::g_index_reduce_g(casadi_int i) const {
  stats();
  return g_index_reduce_g_[i];
}

casadi_int OptiNode::g_index_unreduce_g(casadi_int i) const {
  stats();
  return g_index_unreduce_g_[i];
}

casadi_int OptiNode::g_index_reduce_x(casadi_int i) const {
  stats();
  return g_index_reduce_x_[i];
}

std::string OptiNode::return_status() const {
  Dict mystats;
  try {
    mystats = stats();
  } catch (...) {
    //
  }
  if (mystats.find("return_status")!=mystats.end()) {
    std::stringstream ss;
    ss << mystats.at("return_status");
    return ss.str();
  }
  return "unknown";
}

bool OptiNode::return_success(bool accept_limit) const {
  Dict mystats;
  try {
    mystats = stats();
  } catch (...) {
    //
  }
  bool success = false;
  if (mystats.find("success")!=mystats.end()) success = mystats.at("success");
  if (!accept_limit) return success;

  bool limited = false;
  if (mystats.find("unified_return_status")!=mystats.end())
    limited = mystats.at("unified_return_status")=="SOLVER_RET_LIMITED";
  return success || limited;
}

Function OptiNode::casadi_solver() const {
  return solver_;
}

void OptiNode::set_meta(const MX& m, const MetaVar& meta) {
  meta_[m.get()] = meta;
}

void OptiNode::set_meta_con(const MX& m, const MetaCon& meta) {
  meta_con_[m.get()] = meta;
}

void OptiNode::update_user_dict(const MX& m, const Dict& meta) {
  if (has_con(m)) {
    MetaCon m_update = get_meta_con(m);
    MetaVar m_update2 = get_meta(m_update.dual_canon);
    for (const auto & it : meta) {
      m_update.extra[it.first] = it.second;
      m_update2.extra[it.first] = it.second;
    }
    set_meta_con(m, m_update);
    set_meta(m_update.dual_canon, m_update2);
  } else {
    for (const auto & s : MX::symvar(m)) {
      MetaVar m_update = get_meta(s);
      for (const auto & it : meta)
        m_update.extra[it.first] = it.second;
      set_meta(s, m_update);
    }
  }
}

Dict OptiNode::user_dict(const MX& m) const {
  if (has_con(m)) {
    MetaCon meta = get_meta_con(m);
    return meta.extra;
  } else {
    MetaVar meta = get_meta(m);
    return meta.extra;
  }
}

MX OptiNode::dual(const MX& m) const {
  return meta_con(m).dual;
}

const MetaVar& OptiNode::meta(const MX& m) const {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;
}

const MetaCon& OptiNode::meta_con(const MX& m) const {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;
}

MetaVar& OptiNode::meta(const MX& m) {
  assert_has(m);
  auto find = meta_.find(m.get());
  return find->second;
}

MetaCon& OptiNode::meta_con(const MX& m) {
  assert_has_con(m);
  auto find = meta_con_.find(m.get());
  return find->second;
}

MetaVar OptiNode::get_meta(const MX& m) const {
  return meta(m);
}

MetaCon OptiNode::get_meta_con(const MX& m) const {
  return meta_con(m);
}

bool OptiNode::has(const MX& m) const {
  return meta_.find(m.get())!=meta_.end();
}

bool OptiNode::has_con(const MX& m) const {
  return meta_con_.find(m.get())!=meta_con_.end();
}

void OptiNode::assert_has(const MX& m) const {
  if (!has(m)) {
    VariableType vt;
    casadi_assert(m.is_symbolic(), "Symbol expected, got expression.");
    if (parse_opti_name(m.name(), vt)) {
      casadi_error("Unknown: " + describe(m));
    } else {
      casadi_error("Unknown: " + describe(m) + "\n"
        "Note: you cannot use a raw MX.sym in your Opti problem, "
        "unless you package it in a CasADi Function.");
    }
  }
}

void OptiNode::assert_has_con(const MX& m) const {
  casadi_assert(has_con(m), "Constraint not present in Opti stack.");
}

casadi_int OptiNode::instance_count_ = 0;

bool OptiNode::parse_opti_name(const std::string& name, VariableType& vt) const {
  casadi_int i = name.find("opti");
  if (i!=0) return false;

  i = name.find("_");
  i++;
  if (i==std::string::npos) return false;
  if (name.substr(i, 1)=="x") {
    vt = OPTI_VAR;
    return true;
  } else if (name.substr(i, 1)=="p") {
    vt = OPTI_PAR;
    return true;
  } else if (name.substr(i, 5)=="lam_g") {
    vt = OPTI_DUAL_G;
    return true;
  }

  return false;
}

std::string OptiNode::variable_type_to_string(VariableType vt) const {
  auto it = VariableType2String_.find(vt);
  if (it==VariableType2String_.end()) return "unknown variable type";
  return it->second;
}

std::map<VariableType, std::string> OptiNode::VariableType2String_ =
  {{OPTI_VAR, "decision variable"},
   {OPTI_PAR, "parameter"},
   {OPTI_DUAL_G, "dual variable"}};

std::vector<MX> OptiNode::initial() const {
  std::vector<MX> ret;
  for (const auto& e : symvar()) {
    if (meta(e).type==OPTI_VAR || meta(e).type==OPTI_DUAL_G)
      ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
  }
  return ret;
}

std::vector<MX> OptiNode::value_variables() const {
  std::vector<MX> ret;
  for (const auto& e : symvar()) {
    if (meta(e).type==OPTI_VAR)
      ret.push_back(e==store_latest_.at(meta(e).type)[meta(e).i]);
  }
  return ret;
}

std::vector<MX> OptiNode::value_parameters() const {
  std::vector<MX> ret;
  for (const auto& e : symvar()) {
    if (meta(e).type==OPTI_PAR)
      ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
  }
  return ret;
}

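// Freeze the problem into solver-ready form: activate the symbols that occur
// in the objective and constraints, assign each a contiguous nonzero range,
// build the scaled and unscaled nlp_ dictionaries, flatten the constraint
// bounds, and compile the parametric bounds function bounds_.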
void OptiNode::bake() {
  casadi_assert(!f_.is_empty() || !g_.empty(),
    "You need to specify at least an objective (by calling 'minimize'), "
    "or a constraint (by calling 'subject_to').");

  symbol_active_.clear();
  symbol_active_.resize(symbols_.size());

  // Gather all expressions
  MX total_expr = vertcat(f_, veccat(g_));

  // Categorize the symbols appearing in those expressions
  for (const auto& d : symvar(total_expr))
    symbol_active_[meta(d).count] = true;

  std::vector<MX> x = active_symvar(OPTI_VAR);
  for (casadi_int i=0; i<x.size(); ++i) meta(x[i]).active_i = i;

  casadi_int offset = 0;
  for (const auto& v : x) {
    meta(v).start = offset;
    offset += v.nnz();
    meta(v).stop = offset;
  }
  std::vector<MX> p = active_symvar(OPTI_PAR);
  for (casadi_int i=0; i<p.size(); ++i) meta(p[i]).active_i = i;

  // Fill the nlp definition
  nlp_["x"] = veccat(x);
  nlp_["p"] = veccat(p);

  nlp_unscaled_["x"] = veccat(x);
  nlp_unscaled_["p"] = veccat(p);

  discrete_.clear();
  for (const MX& e : x) {
    discrete_.insert(discrete_.end(), e.nnz(), meta(e).domain == OPTI_DOMAIN_INTEGER);
  }

  nlp_["f"] = f_;
  nlp_unscaled_["f"] = f_;

  offset = 0;
  for (casadi_int i=0; i<g_.size(); ++i) {
    MetaCon& r = meta_con(g_[i]);
    MetaVar& r2 = meta(r.dual_canon);
    symbol_active_[r2.count] = true;

    // Compute offsets for this constraint:
    // location into the global constraint variable
    r.start = offset;
    offset += r.canon.nnz();
    r.stop = offset;

    r2.start = r.start;
    r2.stop = r.stop;
  }

  index_all_to_g_.resize(offset);
  offset = 0;
  casadi_int offset_g = 0;
  for (const auto& g : g_) {
    const MetaCon& r = meta_con(g);
    if (r.type==OPTI_PSD) {
      for (casadi_int i=0; i<r.stop-r.start; ++i) {
        index_all_to_g_[r.start+i] = -1;
      }
    } else {
      for (casadi_int i=0; i<r.stop-r.start; ++i) {
        index_all_to_g_[r.start+i] = offset_g+i;
      }
      offset_g += r.canon.nnz();
    }
  }

  std::vector<MX> lam = active_symvar(OPTI_DUAL_G);
  for (casadi_int i=0; i<lam.size(); ++i) meta(lam[i]).active_i = i;

  lam_ = veccat(lam);

  // Collect bounds and canonical form of constraints
  std::vector<MX> g_all, g_unscaled_all;
  std::vector<MX> h_all, h_unscaled_all;
  std::vector<MX> lbg_all, lbg_unscaled_all;
  std::vector<MX> ubg_all, ubg_unscaled_all;

  g_linear_scale_.clear();
  h_linear_scale_.clear();
  std::vector<DM> g_linear_scale, h_linear_scale;

  equality_.clear();
  for (const auto& g : g_) {
    if (meta_con(g).type==OPTI_PSD) {
      h_all.push_back(meta_con(g).canon/meta_con(g).linear_scale);
      if (meta_con(g).canon.numel()==meta_con(g).linear_scale.numel()) {
        h_linear_scale.push_back(meta_con(g).linear_scale);
      } else {
        casadi_assert_dev(meta_con(g).linear_scale.numel()==1);
        h_linear_scale.push_back(DM::ones(meta_con(g).canon.sparsity())*meta_con(g).linear_scale);
      }
      h_unscaled_all.push_back(meta_con(g).canon);
    } else {
      g_all.push_back(meta_con(g).canon/meta_con(g).linear_scale);
      if (meta_con(g).canon.numel()==meta_con(g).linear_scale.numel()) {
        g_linear_scale.push_back(meta_con(g).linear_scale);
      } else {
        casadi_assert_dev(meta_con(g).linear_scale.numel()==1);
        g_linear_scale.push_back(DM::ones(meta_con(g).canon.sparsity())*meta_con(g).linear_scale);
      }
      g_unscaled_all.push_back(meta_con(g).canon);
      lbg_all.push_back(meta_con(g).lb/meta_con(g).linear_scale);
      lbg_unscaled_all.push_back(meta_con(g).lb);
      ubg_all.push_back(meta_con(g).ub/meta_con(g).linear_scale);
      ubg_unscaled_all.push_back(meta_con(g).ub);

      equality_.insert(equality_.end(),
        meta_con(g).canon.numel(),
        meta_con(g).type==OPTI_EQUALITY || meta_con(g).type==OPTI_GENERIC_EQUALITY);
    }
  }

  nlp_["g"] = veccat(g_all);
  g_linear_scale_ = veccat(g_linear_scale).nonzeros();
  nlp_unscaled_["g"] = veccat(g_unscaled_all);
  if (problem_type_=="conic") {
    nlp_["h"] = diagcat(h_all);
    nlp_unscaled_["h"] = diagcat(h_unscaled_all);
    h_linear_scale_ = veccat(h_linear_scale).nonzeros();
  }

  // Get scaling data
  std::vector<DM> linear_scale = active_values(OPTI_VAR, store_linear_scale_);
  std::vector<DM> linear_scale_offset = active_values(OPTI_VAR, store_linear_scale_offset_);

  linear_scale_ = veccat(linear_scale).nonzeros();
  linear_scale_offset_ = veccat(linear_scale_offset).nonzeros();

  // Unscaled version of x
  std::vector<MX> x_unscaled(x.size());
  for (casadi_int i=0; i<x.size(); ++i) {
    x_unscaled[i] = x[i]*linear_scale[i] + linear_scale_offset[i];
  }

  // Perform substitution
  std::vector<MX> expr = {nlp_["f"], nlp_["g"]};
  if (problem_type_=="conic") expr.push_back(nlp_["h"]);
  std::vector<MX> fgh = substitute(expr, x, x_unscaled);
  nlp_["f"] = fgh[0];
  nlp_["g"] = fgh[1];
  if (problem_type_=="conic") {
    nlp_["h"] = fgh[2];
  }

  // Scale of objective
  nlp_["f"] = nlp_["f"]/f_linear_scale_;

  // Create bounds helper function
  MXDict bounds;
  bounds["p"] = nlp_["p"];
  bounds_lbg_ = veccat(lbg_all);
  bounds_ubg_ = veccat(ubg_all);
  bounds_unscaled_lbg_ = veccat(lbg_unscaled_all);
  bounds_unscaled_ubg_ = veccat(ubg_unscaled_all);

  bounds["lbg"] = bounds_lbg_;
  bounds["ubg"] = bounds_ubg_;

  bounds_ = Function("bounds", bounds, {"p"}, {"lbg", "ubg"});
  mark_problem_dirty(false);
}

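// Wrap a solver helper (nlp_jac_g or nlp_hess_l) so that it accepts scaled
// inputs and returns scaled outputs, applying the chain rule with the
// diagonal x/g/f scaling factors.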
Function OptiNode::scale_helper(const Function& h) const {
  if (problem_dirty()) return baked_copy().scale_helper(h);
  std::string name = h.name();
  MX g_linear_scale_mx = g_linear_scale();
  MX g_linear_scale_inv = 1/g_linear_scale();
  MX f_linear_scale_mx = f_linear_scale();
  MX x_linear_scale_mx = x_linear_scale();
  MX x_linear_scale_offset_mx = x_linear_scale_offset();
  if (name=="nlp_jac_g") {
    MXDict arg;
    arg["x"] = MX::sym("x", nx());
    arg["p"] = MX::sym("p", np());
    MXDict args;
    args["x"] = arg["x"]*x_linear_scale_mx + x_linear_scale_offset_mx;
    args["p"] = arg["p"];
    MXDict res = h(args);
    for (const auto & it : res) {
      if (it.first=="g") {
        arg[it.first] = it.second*g_linear_scale_inv;
      } else if (it.first=="jac_g_x") {
        arg[it.first] = mtimes(
          mtimes(diag(g_linear_scale_inv), it.second),
          diag(x_linear_scale_mx));
      } else {
        casadi_error("Unknown output '" + it.first + "'. Expecting g, jac_g_x.");
      }
    }
    Function ret(name, arg, h.name_in(), h.name_out());
    return ret;
  } else if (name=="nlp_hess_l") {
    MXDict arg;
    arg["x"] = MX::sym("x", nx());
    arg["p"] = MX::sym("p", np());
    arg["lam_f"] = MX::sym("lam_f");
    arg["lam_g"] = MX::sym("lam_g", ng());
    MXDict args;
    args["x"] = arg["x"]*x_linear_scale_mx + x_linear_scale_offset_mx;
    args["p"] = arg["p"];
    args["lam_f"] = arg["lam_f"]/f_linear_scale_mx;
    args["lam_g"] = arg["lam_g"]/g_linear_scale_mx;
    MXDict res = h(args);
    for (const auto & it : res) {
      if (it.first=="triu_hess_gamma_x_x" ||
          it.first=="hess_gamma_x_x" ||
          (it.second.size1()==nx() && it.second.is_square())) {
        MX D = diag(x_linear_scale_mx);
        arg[it.first] = mtimes(mtimes(D, it.second), D);
      } else {
        casadi_error("Unknown output '" + it.first + "'. Expecting triu_hess_gamma_x_x");
      }
    }
    Function ret(name, arg, h.name_in(), h.name_out());
    return ret;
  } else {
    casadi_error("Unknown helper function '" + name + "'");
  }
}

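// Select the solver plugin, e.g. opti.solver("ipopt") through the facade;
// plugin_options go to the CasADi plugin directly, while solver_options are
// nested under the solver's own name.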
void OptiNode::solver(const std::string& solver_name, const Dict& plugin_options,
    const Dict& solver_options) {
  solver_name_ = solver_name;
  solver_options_ = plugin_options;
  if (!solver_options.empty())
    solver_options_[solver_name] = solver_options;
  mark_solver_dirty();
}

std::vector<MX> OptiNode::sort(const std::vector<MX>& v) const {
  // We exploit the fact that std::map is ordered

  // Populate map
  std::map<casadi_int, MX> unordered;
  for (const auto& d : v)
    unordered[meta(d).count] = d;

  // Loop over map (ordered)
  std::vector<MX> ret;
  for (auto const &e : unordered)
    ret.push_back(e.second);
  return ret;
}

std::vector<MX> OptiNode::symvar() const {
  return symbols_;
}

std::vector<MX> OptiNode::symvar(const MX& expr) const {
  return sort(MX::symvar(expr));
}

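// Flatten a chained inequality a<=b<=c into its ordered operands {a, b, c};
// 'flipped' reports whether the chain was written right-to-left (with >=).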
std::vector<MX> OptiNode::ineq_unchain(const MX& a, bool& flipped) {
  flipped = false;
  casadi_assert_dev(a.is_op(OP_LE) || a.is_op(OP_LT));

  // Are there inequalities in the left or right leaf?
  bool left = a.dep(0).is_op(OP_LE) || a.dep(0).is_op(OP_LT);
  bool right = a.dep(1).is_op(OP_LE) || a.dep(1).is_op(OP_LT);
  casadi_assert_dev(!left || !right);

  if (!left && !right)
    return {a.dep(0), a.dep(1)}; // Simple inequality

  // We have a chain of inequalities
  bool ineq = !left;
  std::vector<MX> ret = {a.dep(!ineq)};
  MX e = a.dep(ineq);
  while (e.is_op(OP_LE) || e.is_op(OP_LT)) {
    casadi_assert_dev(!e.is_op(OP_EQ));
    casadi_assert_dev(!e.dep(!ineq).is_op(OP_LE) && !e.dep(!ineq).is_op(OP_LT));
    ret.push_back(e.dep(!ineq));
    e = e.dep(ineq);
  }
  ret.push_back(e);
  if (left) std::reverse(ret.begin(), ret.end());
  flipped = !left;

  return ret;
}

void OptiNode::assert_only_opti_symbols(const MX& e) const {
  std::vector<MX> symbols = MX::symvar(e);
  for (const auto& s : symbols) assert_has(s);
}

void OptiNode::assert_only_opti_nondual(const MX& e) const {
  std::vector<MX> symbols = MX::symvar(e);
  for (const auto& s : symbols) {
    assert_has(s);
    casadi_assert(meta(s).type!=OPTI_DUAL_G, "Dual variables forbidden in this context.");
  }
}

bool OptiNode::is_parametric(const MX& expr) const {
  return symvar(expr, OPTI_VAR).empty();
}

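// Canonicalize a user constraint into MetaCon form: classify it (equality,
// single/double inequality, generic chain, or PSD), move parametric sides
// into lb/ub, and record the optional linear scale. For conic problems,
// norm bounds ||y|| <= t are rewritten as second-order cone constraints.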
MetaCon OptiNode::canon_expr(const MX& expr, const DM& linear_scale) const {
  MX c = expr;

  MetaCon con;
  con.original = expr;
  casadi_assert(linear_scale.is_scalar() || linear_scale.size()==expr.size(),
    "Linear scale must have the same size as the expression. "
    "You got linear_scale " + linear_scale.dim() + " while " + expr.dim() + " is expected.");
  con.linear_scale = linear_scale;

  if (c.is_op(OP_LE) || c.is_op(OP_LT)) { // Inequalities
    std::vector<MX> ret;
    bool flipped;
    std::vector<MX> args = ineq_unchain(c, flipped);
    std::vector<bool> parametric;
    for (auto &a : args) parametric.push_back(is_parametric(a));

    if (args.size()==2 && (parametric[0] || parametric[1])) {
      // case: g(x,p) <= bound(p)
      MX e = args[0]-args[1];
      if (e.is_vector()) {
        casadi_assert(!parametric[0] || !parametric[1],
          "Constraint must contain decision variables.");
        if (problem_type_=="conic") {
          if (args[0].op()==OP_NORMF || args[0].op()==OP_NORM2) {
            args[0] = -soc(args[0].dep(), args[1]);
            args[1] = 0;
          }
        } else {
          con.type = OPTI_INEQUALITY;
          if (parametric[0]) {
            con.lb = args[0]*DM::ones(e.sparsity());
            con.ub = inf*DM::ones(e.sparsity());
            con.canon = args[1]*DM::ones(e.sparsity());
          } else {
            con.lb = -inf*DM::ones(e.sparsity());
            con.ub = args[1]*DM::ones(e.sparsity());
            con.canon = args[0]*DM::ones(e.sparsity());
          }
          return con;
        }
      }
      // Fall through to generic inequalities
    } else if (args.size()==3 && parametric[0] && parametric[2]) {
      // lb(p) <= g(x,p) <= ub(p)
      con.type = OPTI_DOUBLE_INEQUALITY;
      con.lb = args[0]*DM::ones(args[1].sparsity());
      con.ub = args[2]*DM::ones(args[1].sparsity());
      con.canon = args[1]*DM::ones(args[1].sparsity());
      con.flipped = flipped;
      con.n = 2;
      return con;
    }

    bool type_known = false;
    for (casadi_int j=0; j<args.size()-1; ++j) {
      MX e = args[j]-args[j+1];
      if (problem_type_=="conic") {
        if (args[j].op()==OP_NORMF || args[j].op()==OP_NORM2) {
          args[j] = -soc(args[j].dep(), args[j+1]);
          args[j+1] = 0;
          e = args[j]-args[j+1];
        }
      }
      if (e.is_vector()) {
        // g1(x,p) <= g2(x,p)
        ret.push_back(e);
        casadi_assert_dev(!type_known || con.type==OPTI_GENERIC_INEQUALITY);
        type_known = true;
        con.type = OPTI_GENERIC_INEQUALITY;
        con.flipped = flipped;
      } else {
        // A(x,p) >= b(p)
        MX a = args[j+1];
        MX b = args[j];
        e = a-b;

        casadi_assert(e.size1()==e.size2(),
          "Matrix inequalities must be square. Did you mean element-wise inequality instead?");
        if (a.is_scalar()) a *= MX::eye(e.size1());
        if (b.is_scalar()) b *= MX::eye(e.size1());
        e = a-b;

        ret.push_back(e);
        casadi_assert_dev(!type_known || con.type==OPTI_PSD);
        type_known = true;
        con.type = OPTI_PSD;
      }
    }

    if (con.type==OPTI_GENERIC_INEQUALITY) {
      con.canon = veccat(ret);
      con.lb = -inf*DM::ones(con.canon.sparsity());
      con.ub = DM::zeros(con.canon.sparsity());
      con.n = ret.size();
      if (!con.linear_scale.is_scalar()) {
        con.linear_scale = repmat(con.linear_scale, ret.size(), 1);
      }
    } else {
      con.canon = diagcat(ret);
      con.n = ret.size();
    }
    return con;
  } else if (c.is_op(OP_EQ)) { // Equality
    casadi_assert(!is_parametric(c.dep(0)) || !is_parametric(c.dep(1)),
      "Constraint must contain decision variables.");
    MX e = c.dep(0)-c.dep(1);
    if (is_parametric(c.dep(0))) {
      con.canon = c.dep(1)*DM::ones(e.sparsity());
      con.lb = c.dep(0)*DM::ones(e.sparsity());
      con.type = OPTI_EQUALITY;
      casadi_assert(c.dep(0).size1()<=c.dep(1).size1() && c.dep(0).size2()<=c.dep(1).size2(),
        "Constraint shape mismatch.");
    } else if (is_parametric(c.dep(1))) {
      con.canon = c.dep(0)*DM::ones(e.sparsity());
      con.lb = c.dep(1)*DM::ones(e.sparsity());
      con.type = OPTI_EQUALITY;
      casadi_assert(c.dep(1).size1()<=c.dep(0).size1() && c.dep(1).size2()<=c.dep(0).size2(),
        "Constraint shape mismatch.");
    } else {
      con.lb = DM::zeros(e.sparsity());
      con.canon = e;
      con.type = OPTI_GENERIC_EQUALITY;
    }
    con.ub = con.lb;
    return con;
  } else { // Something else
    con.type = OPTI_UNKNOWN;
    con.canon = c;
    return con;
  }
}

void OptiNode::assert_solved() const {
  casadi_assert(solved(),
    "This action is forbidden since you have not solved the Opti stack yet "
    "(by calling 'solve').");
}

void OptiNode::assert_baked() const {
  casadi_assert(!problem_dirty(),
    "This action is forbidden since you have not baked the Opti stack yet "
    "(by calling 'solve').");
}

void OptiNode::assert_empty() const {
  casadi_assert_dev(g_.empty());
  casadi_assert_dev(f_.is_empty());
}

void OptiNode::minimize(const MX& f, double linear_scale) {
  assert_only_opti_nondual(f);
  mark_problem_dirty();
  casadi_assert(f.is_scalar(), "Objective must be scalar, got " + f.dim() + ".");
  f_ = f;
  f_linear_scale_ = linear_scale;
}

void OptiNode::subject_to(const MX& g, const DM& linear_scale, const Dict& options) {
  assert_only_opti_nondual(g);
  mark_problem_dirty();
  g_.push_back(g);

  casadi_assert(!g.is_empty(), "You passed an empty expression to `subject_to`. "
    "Make sure the number of rows and columns is non-zero. "
    "Got " + g.dim(true) + ".");
  casadi_assert(g.nnz()>0, "You passed a fully sparse expression to `subject_to`. "
    "Make sure the expression has at least one nonzero. "
    "Got " + g.dim(true) + ".");
  casadi_assert(!g.is_constant(), "You passed a constant to `subject_to`. "
    "You need a symbol to form a constraint.");

  // Store the meta-data
  set_meta_con(g, canon_expr(g, linear_scale));
  register_dual(meta_con(g));

  for (auto && it : options) {
    if (it.first=="stacktrace") {
      meta_con(g).extra["stacktrace"] = it.second.to_dict_vector();
      meta(meta_con(g).dual_canon).extra["stacktrace"] = it.second.to_dict_vector();
    } else if (it.first=="meta") {
      update_user_dict(g, it.second.to_dict());
    } else {
      casadi_error("Unknown option: " + it.first);
    }
  }
}

void OptiNode::subject_to() {
  mark_problem_dirty();
  g_.clear();
  store_initial_[OPTI_DUAL_G].clear();
  store_latest_[OPTI_DUAL_G].clear();
  count_dual_ = 0;
}

std::vector<MX> OptiNode::symvar(const MX& expr, VariableType type) const {
  std::vector<MX> ret;
  for (const auto& d : symvar(expr)) {
    if (meta(d).type==type) ret.push_back(d);
  }

  return ret;
}

void OptiNode::res(const DMDict& res) {
  const std::vector<double> & x_v = res.at("x").nonzeros();
  for (const auto &v : active_symvar(OPTI_VAR)) {
    casadi_int i = meta(v).i;
    std::vector<double> & data_v = store_latest_[OPTI_VAR][i].nonzeros();
    for (casadi_int k=0; k<data_v.size(); ++k) {
      casadi_int j = meta(v).start+k;
      data_v[k] = x_v[j]*linear_scale_[j] + linear_scale_offset_[j];
    }
  }
  if (res.find("lam_g")!=res.end()) {
    const std::vector<double> & lam_v = res.at("lam_g").nonzeros();
    for (const auto &v : active_symvar(OPTI_DUAL_G)) {
      casadi_int i = meta(v).i;
      std::vector<double> & data_v = store_latest_[OPTI_DUAL_G][i].nonzeros();
      for (casadi_int k=0; k<data_v.size(); ++k) {
        casadi_int j = meta(v).start+k;
        j = index_all_to_g_.at(j);
        if (j<0) continue;
        data_v[k] = lam_v.at(j)/g_linear_scale_.at(j)*f_linear_scale_;
      }
    }
  }
  res_ = res;
  mark_solved();
}

bool OptiNode::old_callback() const {
  if (callback_.is_null()) return false;
  InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
  return !cb->associated_with(this);
}

Function OptiNode::solver_construct(bool callback) {
  if (problem_dirty()) {
    bake();
  }

  // Verify the constraint types
  for (const auto& g : g_) {
    if (problem_type_!="conic") {
      if (meta_con(g).type==OPTI_PSD)
        casadi_error("Psd constraints not implemented yet. "
          "Perhaps you intended an element-wise inequality? "
          "In that case, make sure that the matrix is flattened (e.g. mat(:)).");
    }
  }

  Dict solver_options_all = solver_options_;

  if (solver_options_all.find("equality")==solver_options_all.end()) {
    solver_options_all["equality"] = equality_;
  }

  if (solver_options_all.find("discrete")==solver_options_all.end()) {
    solver_options_all["discrete"] = discrete_;
  }

  Dict opts = solver_options_all;

  // Handle callbacks
  if (callback && user_callback_) {
    callback_ = Function::create(new InternalOptiCallback(*this), Dict());
    opts["iteration_callback"] = callback_;
  }

  casadi_assert(!solver_name_.empty(),
    "You must call 'solver' on the Opti stack to select a solver. "
    "Suggestion: opti.solver('ipopt')");

  if (problem_type_=="conic") {
    return qpsol("solver", solver_name_, nlp_, opts);
  } else {
    return nlpsol("solver", solver_name_, nlp_, opts);
  }
}

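// Typical driver sequence through the facade (illustrative sketch):
//   opti.minimize(f);
//   opti.subject_to(g<=0);
//   opti.solver("ipopt");
//   OptiSol sol = opti.solve();   // throws if the solver reports failure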
// Solve the problem
OptiSol OptiNode::solve(bool accept_limit) {

  if (problem_dirty()) {
    bake();
  }

  bool solver_update = solver_dirty() || old_callback() || (user_callback_ && callback_.is_null());

  if (solver_update) {
    solver_ = solver_construct(true);
    mark_solver_dirty(false);
  }

  solve_prepare();
  res(solve_actual(arg_));

  std::string ret = return_status();

  casadi_assert(return_success(accept_limit),
    "Solver failed. You may use opti.debug.value to investigate the latest values of variables."
    " return_status is '" + ret + "'");

  return copy();
}

// Prepare the arguments for a solver call
void OptiNode::solve_prepare() {

  // Verify the constraint types
  for (const auto& g : g_) {
    if (meta_con(g).type==OPTI_UNKNOWN)
      casadi_error("Constraint type unknown. Use ==, >= or <= .");
  }

  if (user_callback_) {
    InternalOptiCallback* cb = static_cast<InternalOptiCallback*>(callback_.get());
    cb->reset();
  }

  // Get initial guess and parameter values
  arg_["x0"] = (veccat(active_values(OPTI_VAR))-linear_scale_offset_)/linear_scale_;
  arg_["p"] = veccat(active_values(OPTI_PAR));
  arg_["lam_g0"] = veccat(active_values(OPTI_DUAL_G));
  if (!arg_["p"].is_regular()) {
    std::vector<MX> s = active_symvar(OPTI_PAR);
    std::vector<DM> v = active_values(OPTI_PAR);
    for (casadi_int i=0; i<s.size(); ++i) {
      casadi_assert(v[i].is_regular(),
        "You have forgotten to assign a value to a parameter ('set_value'), "
        "or have set it to NaN/Inf:\n" + describe(s[i], 1));
    }
  }

  // Evaluate bounds for given parameter values
  DMDict arg;
  arg["p"] = arg_["p"];
  DMDict res = bounds_(arg);
  arg_["lbg"] = res["lbg"];
  arg_["ubg"] = res["ubg"];

  stats_.clear();
}

DMDict OptiNode::solve_actual(const DMDict& arg) {
  return solver_(arg);
}

bool override_num(const std::map<casadi_int, MX> & temp, std::vector<DM>& num, casadi_int i) {
  // Override when values are supplied
  auto it = temp.find(i);
  if (it==temp.end()) {
    return true;
  } else {
    Slice all;
    DM t = static_cast<DM>(it->second);
    num.back().set(t, false, all, all);
  }
  return false;
}

DM OptiNode::value(const MX& expr, const std::vector<MX>& values, bool scaled) const {
  std::vector<MX> x = symvar(expr, OPTI_VAR);
  std::vector<MX> p = symvar(expr, OPTI_PAR);
  std::vector<MX> lam = symvar(expr, OPTI_DUAL_G);

  Function helper = Function("helper", std::vector<MX>{veccat(x), veccat(p), veccat(lam)}, {expr});
  if (helper.has_free())
    casadi_error("This expression has symbols that are not defined "
      "within Opti using variable/parameter.");

  std::map<VariableType, std::map<casadi_int, MX> > temp;
  temp[OPTI_DUAL_G] = std::map<casadi_int, MX>();
  for (const auto& v : values) {
    casadi_assert_dev(v.is_op(OP_EQ));
    casadi_int i = meta(v.dep(1)).i;
    casadi_assert_dev(v.dep(0).is_constant());
    temp[meta(v.dep(1)).type][i] = v.dep(0);
  }

  bool undecided_vars = false;
  std::vector<DM> x_num;
  for (const auto& e : x) {
    casadi_int i = meta(e).i;
    x_num.push_back(store_latest_.at(OPTI_VAR).at(i));
    undecided_vars |= override_num(temp[OPTI_VAR], x_num, i);
    if (scaled) {
      x_num.back() = (x_num.back() - store_linear_scale_offset_.at(OPTI_VAR)[meta(e).i])/
        store_linear_scale_.at(OPTI_VAR)[meta(e).i];
    }
  }

  std::vector<DM> lam_num;
  for (const auto& e : lam) {
    casadi_int i = meta(e).i;
    casadi_assert(i<store_latest_.at(OPTI_DUAL_G).size(),
      "This expression has a dual for a constraint that is not given to Opti:\n" +
      describe(e, 1));
    lam_num.push_back(store_latest_.at(OPTI_DUAL_G).at(i));
    undecided_vars |= override_num(temp[OPTI_DUAL_G], lam_num, i);
  }

  std::vector<DM> p_num;
  for (const auto& e : p) {
    casadi_int i = meta(e).i;
    p_num.push_back(store_initial_.at(OPTI_PAR).at(i));
    override_num(temp[OPTI_PAR], p_num, i);
    casadi_assert(p_num.back().is_regular(),
      "This expression depends on a parameter with unset value:\n" +
      describe(e, 1));
  }

  if (undecided_vars) {
    assert_solved();
    for (const auto& e : x)
      casadi_assert(symbol_active_[meta(e).count],
        "This expression has symbols that do not appear in the constraints and objective:\n" +
        describe(e, 1));
    for (const auto& e : lam)
      casadi_assert(symbol_active_[meta(e).count],
        "This expression has a dual for a constraint that is not given to Opti:\n" +
        describe(e, 1));
  }

  std::vector<DM> arg = helper(std::vector<DM>{veccat(x_num), veccat(p_num), veccat(lam_num)});
  return arg[0];
}

void OptiNode::assert_active_symbol(const MX& m) const {
  assert_has(m);
  assert_baked();
  casadi_assert(symbol_active_[meta(m).count], "Opti symbol is not used in Solver."
    " It does not make sense to assign a value to it:\n" + describe(m, 1));
}

void OptiNode::set_initial(const std::vector<MX>& assignments) {
  for (const auto& v : assignments) {
    casadi_assert_dev(v.is_op(OP_EQ));
    casadi_assert_dev(v.dep(0).is_constant());
    if (has(v.dep(1)))
      set_initial(v.dep(1), static_cast<DM>(v.dep(0)));
  }
}

void OptiNode::set_domain(const MX& x, const std::string& domain) {
  mark_solved(false);
  mark_problem_dirty();
  casadi_assert(x.is_valid_input(), "First argument to set_domain should be a variable.");
  DomainType type;
  if (domain=="real") {
    type = OPTI_DOMAIN_REAL;
  } else if (domain=="integer") {
    type = OPTI_DOMAIN_INTEGER;
  } else {
    casadi_error("Unknown domain '" + domain + "'. Known values are 'real', 'integer'.");
  }
  for (const auto& prim : x.primitives()) {
    MetaVar& m = meta(prim);
    m.domain = type;
  }
}

void OptiNode::set_value(const std::vector<MX>& assignments) {
  for (const auto& v : assignments) {
    casadi_assert_dev(v.is_op(OP_EQ));
    casadi_assert_dev(v.dep(0).is_constant());
    if (has(v.dep(1)))
      set_value(v.dep(1), static_cast<DM>(v.dep(0)));
  }
}

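// Core of set_initial/set_value: if x is a plain symbol, write v directly
// into the store; otherwise x must be an affine mapping of symbols, and the
// mapping is inverted nonzero-by-nonzero via its (constant) Jacobian.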
void OptiNode::set_value_internal(const MX& x, const DM& v,
    std::map< VariableType, std::vector<DM> >& store) {
  mark_solved(false);
  casadi_assert_dev(v.is_regular());
  if (x.is_symbolic()) {
    DM& target = store[meta(x).type][meta(x).i];
    Slice all;
    target.set(v, false, all, all);
    return;
  }

  // Obtain symbolic primitives
  std::vector<MX> symbols = MX::symvar(x);
  MX symbols_cat = veccat(symbols);

  std::string failmessage = "You cannot set initial/value of an arbitrary expression. "
    "Use symbols or simple mappings of symbols.";

  // Assert x is linear in its symbolic primitives
  for (bool b : which_depends(x, symbols_cat, 2, false)) casadi_assert(!b, failmessage);

  // Evaluate jacobian of expr wrt symbols
  Dict opts = {{"compact", true}};
  Function Jf("Jf", std::vector<MX>{}, std::vector<MX>{jacobian(x, veccat(symbols), opts)});
  DM J = Jf(std::vector<DM>{})[0];
  Sparsity sp_JT = J.T().sparsity();

  Function Ff("Ff", symbols, {x});
  DM E = Ff(std::vector<DM>(symbols.size(), 0))[0];
  std::vector<double>& e = E.nonzeros();

  // Cast the v input into the expected sparsity
  Slice all;
  DM value(x.sparsity());
  value.set(v, false, all, all);

  // Purge empty rows
  std::vector<casadi_int> filled_rows = sum2(J).get_row();
  J = J(filled_rows, all); // NOLINT(cppcoreguidelines-slicing)

  // Get rows and columns of the mapping
  std::vector<casadi_int> row, col;
  J.sparsity().get_triplet(row, col);
  const std::vector<double>& scaling = J.nonzeros();
  const std::vector<double>& data_original = value.nonzeros();

  std::vector<double> data; data.reserve(value.nnz());
  for (casadi_int i=0; i<value.nnz(); ++i) {
    double v = data_original[i];
    casadi_int nz = sp_JT.colind()[i+1]-sp_JT.colind()[i];
    casadi_assert(nz<=1, failmessage);
    if (nz) {
      data.push_back(v);
    } else {
      casadi_assert(v==e[i], "In initial/value assignment: "
        "inconsistent numerical values. At nonzero " + str(i) + ", lhs has "
        + str(e[i]) + ", while rhs has " + str(v) + ".");
    }
  }

  // Contiguous workspace for nonzeros of all involved symbols
  std::vector<double> temp(symbols_cat.nnz(), casadi::nan);
  for (casadi_int k=0; k<data.size(); ++k) {
    double& lhs = temp[col[k]];
    double rhs = data[row[k]]/scaling[row[k]];
    if (std::isnan(lhs)) {
      // Assign in the workspace
      lhs = rhs;
    } else {
      casadi_assert(lhs==rhs, "Initial/value assignment with mapping is ambiguous.");
    }
  }

  casadi_int offset = 0;
  for (const auto & s : symbols) {
    DM& target = store[meta(s).type][meta(s).i];
    std::vector<double>& data = target.nonzeros();
    // Loop over nonzeros in each symbol
    for (casadi_int i=0; i<s.nnz(); ++i) {
      // Copy from the workspace (barring fields that were not set)
      double v = temp[offset+i];
      if (!std::isnan(v)) data[i] = v;
    }
    offset += s.nnz();
  }
}

void OptiNode::set_initial(const MX& x, const DM& v) {
  for (const auto & s : MX::symvar(x))
    casadi_assert(meta(s).type!=OPTI_PAR,
      "You cannot set an initial value for a parameter. Did you mean 'set_value'?");
  set_value_internal(x, v, store_initial_);
}

void OptiNode::set_value(const MX& x, const DM& v) {
  for (const auto & s : MX::symvar(x))
    casadi_assert(meta(s).type!=OPTI_VAR,
      "You cannot set a value for a variable. Did you mean 'set_initial'?");
  set_value_internal(x, v, store_initial_);
}

void OptiNode::set_linear_scale(const MX& x, const DM& scale, const DM& offset) {
  for (const auto & s : MX::symvar(x))
    casadi_assert(meta(s).type!=OPTI_PAR,
      "You cannot set a scale value for a parameter.");
  casadi_assert(scale.is_scalar() || scale.size()==x.size(),
    "Dimension mismatch in linear_scale. Expected " + x.dim() + ", got " + scale.dim() + ".");
  set_value_internal(x, scale, store_linear_scale_);
  casadi_assert(offset.is_scalar() || offset.size()==x.size(),
    "Dimension mismatch in linear_scale offset. Expected " + x.dim() +
    ", got " + offset.dim() + ".");
  set_value_internal(x, offset, store_linear_scale_offset_);
}

std::vector<MX> OptiNode::active_symvar(VariableType type) const {
  if (symbol_active_.empty()) return std::vector<MX>{};
  std::vector<MX> ret;
  for (const auto& s : symbols_) {
    if (symbol_active_[meta(s).count] && meta(s).type==type)
      ret.push_back(s);
  }
  return ret;
}

std::vector<DM> OptiNode::active_values(VariableType type) const {
  return active_values(type, store_initial_);
}

std::vector<DM> OptiNode::active_values(VariableType type,
    const std::map< VariableType, std::vector<DM> >& store) const {
  if (symbol_active_.empty()) return std::vector<DM>{};
  std::vector<DM> ret;
  for (const auto& s : symbols_) {
    if (symbol_active_[meta(s).count] && meta(s).type==type) {
      ret.push_back(store.at(meta(s).type)[meta(s).i]);
    }
  }
  return ret;
}

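// Compile the (baked) problem into a plain CasADi Function mapping 'args'
// (variables/parameters/duals used as inputs) to 'res' (expressions
// evaluated at the solution), embedding a single solver call.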
Function OptiNode::to_function(const std::string& name,
    const std::vector<MX>& args, const std::vector<MX>& res,
    const std::vector<std::string>& name_in,
    const std::vector<std::string>& name_out,
    const Dict& opts) {
  if (problem_dirty()) return baked_copy().to_function(name, args, res, name_in, name_out, opts);

  Function solver = solver_construct(false);

  // Get initial guess and parameter values; entries not overridden by 'args'
  // below keep their currently stored numeric values
  std::vector<MX> x0, p, lam_g;
  for (const auto& d : active_values(OPTI_VAR)) x0.push_back(d);
  for (const auto& d : active_values(OPTI_PAR)) p.push_back(d);
  for (const auto& d : active_values(OPTI_DUAL_G)) lam_g.push_back(d);

  casadi_int k = 0;
  for (const auto& a : args) {
    casadi_assert(a.is_valid_input(), "Argument " + str(k) + " is not purely symbolic.");
    k++;
    for (const auto& prim : a.primitives()) {
      if (!symbol_active_[meta(prim).count]) continue;
      casadi_int i = meta(prim).active_i;
      if (meta(prim).type==OPTI_VAR) {
        x0.at(i) = prim;
      } else if (meta(prim).type==OPTI_PAR) {
        p.at(i) = prim;
      } else if (meta(prim).type==OPTI_DUAL_G) {
        lam_g.at(i) = prim;
      } else {
        casadi_error("Unknown");
      }
    }
  }
  MXDict arg;
  arg["p"] = veccat(p);

  // Evaluate bounds for given parameter values
  MXDict r = bounds_(arg);
  arg["x0"] = veccat(x0);
  arg["lam_g0"] = veccat(lam_g);
  arg["lbg"] = r["lbg"];
  arg["ubg"] = r["ubg"];

  r = solver(arg);

  std::vector<MX> helper_in = {veccat(active_symvar(OPTI_VAR)),
                               veccat(active_symvar(OPTI_PAR)),
                               veccat(active_symvar(OPTI_DUAL_G))};
  Function helper("helper", helper_in, {res});

  std::vector<MX> arg_in = helper(std::vector<MX>{r.at("x"), arg["p"], r.at("lam_g")});

  return Function(name, args, arg_in, name_in, name_out, opts);
}

void OptiNode::disp(std::ostream& stream, bool more) const {
}

casadi_int OptiNode::instance_number() const {
  return instance_number_;
}

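// Print all constraints violated beyond 'tol', with both raw and scaled
// residuals and a description of where each constraint was declared.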
void OptiNode::show_infeasibilities(double tol, const Dict& opts) const {
  // Calling stats() asserts that a solution is available
  stats();
  std::vector<double> lbg_ = value(lbg()).get_elements();
  std::vector<double> ubg_ = value(ubg()).get_elements();
  std::vector<double> g_scaled_ = value(nlp_.at("g"), std::vector<MX>(), true).get_elements();
  std::vector<double> lbg_scaled_ = value(bounds_lbg_, std::vector<MX>(), true).get_elements();
  std::vector<double> ubg_scaled_ = value(bounds_ubg_, std::vector<MX>(), true).get_elements();

  std::vector<double> g_ = value(g()).get_elements();
  uout() << "Violated constraints (tol " << tol << "), in order of declaration:" << std::endl;

  for (casadi_int i=0;i<static_cast<casadi_int>(g_.size());++i) {
    double err = std::max(g_[i]-ubg_[i], lbg_[i]-g_[i]);
    double err_scaled = std::max(g_scaled_[i]-ubg_scaled_[i], lbg_scaled_[i]-g_scaled_[i]);
    if (err>=tol) {
      uout() << "------- i = " << i+GlobalOptions::start_index;
      uout() << "/" << g_.size();

      if (reduced_) {
        if (is_simple_[i]) {
          // Simple constraints were reduced to variable bounds; report the
          // variable index using the configured 0- or 1-based convention
          if (GlobalOptions::start_index==0) {
            uout() << " reduced to bound on x[" << g_index_reduce_x_.at(i) << "]";
          } else {
            uout() << " reduced to bound on x(" << g_index_reduce_x_.at(i)+1 << ")";
          }
        } else {
          uout() << " reduced to g[" << g_index_reduce_g_.at(i) << "]";
        }
      }

      uout() << " ------ " << std::endl;
      uout() << lbg_[i] << " <= " << g_[i] << " <= " << ubg_[i];
      uout() << " (viol " << err << ")" << std::endl;
      if (g_[i]!=g_scaled_[i]) {
        uout() << lbg_scaled_[i] << " <= " << g_scaled_[i] << " <= " << ubg_scaled_[i];
        uout() << " (scaled) (viol " << err_scaled << ")" << std::endl;
      }
      uout() << g_describe(i, opts) << std::endl;
    }
  }
}
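
// Usage sketch (illustrative only): after a failed solve, the advanced
// interface exposes this diagnostic. The tolerance 1e-6 is an arbitrary
// example value.
//
//   Opti opti;
//   // ... build the problem, then:
//   try {
//     opti.solve();
//   } catch (std::exception& e) {
//     // Prints each violated constraint with its bounds, violation
//     // magnitude and source-location description
//     opti.debug().show_infeasibilities(1e-6);
//   }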

} // namespace casadi