25 #include "optistack_internal.hpp"
28 #include "function_internal.hpp"
29 #include "global_options.hpp"
30 #include "filesystem_impl.hpp"
34 class InternalOptiCallback :
public FunctionInternal {
37 InternalOptiCallback(OptiNode& sol) :
FunctionInternal(class_name()), sol_(sol) {}
39 ~InternalOptiCallback()
override {
44 std::string class_name()
const override {
return "InternalOptiCallback";}
49 Sparsity get_sparsity_in(casadi_int i)
override {
54 }
else if (n==
"lam_x" || n==
"x") {
56 }
else if (n==
"lam_g" || n==
"g") {
58 }
else if (n==
"p" || n==
"lam_p") {
70 bool has_eval_dm()
const override {
return true;}
73 std::vector<DM> eval_dm(
const std::vector<DM>& arg)
const override {
82 if (sol_.user_callback_) sol_.user_callback_->call(i);
88 bool associated_with(
const OptiNode* o) {
return &sol_==o; }
101 user_callback_ = callback;
105 user_callback_ =
nullptr;
109 return user_callback_ != 0;
112 std::string OptiNode::format_stacktrace(
const Dict& stacktrace, casadi_int indent) {
113 std::string s_indent;
114 for (casadi_int i=0;i<indent;++i) {
118 std::string
filename = stacktrace.at(
"file").as_string();
119 casadi_int line = stacktrace.at(
"line").as_int();
121 std::string name = stacktrace.at(
"name").as_string();
122 if (name!=
"Unknown" && name!=
"<module>")
123 description +=
" in " + stacktrace.at(
"name").as_string();
126 std::istream& file = *file_ptr;
127 for (casadi_int i=0; i<line-1; ++i) {
128 file.ignore(std::numeric_limits<std::streamsize>::max(),
'\n');
130 std::string contents; std::getline(file, contents);
131 auto it = contents.find_first_not_of(
" \t\r\n");
132 if (it!=std::string::npos) {
133 description +=
"\n" + s_indent + contents.substr(it);
143 casadi_int max_stacktrace_depth = 1;
144 for (
const auto& op : opts) {
145 if (op.first==
"max_stacktrace_depth") {
146 max_stacktrace_depth = op.second.as_int();
148 casadi_warning(
"Unknown option '" + op.first +
"'");
151 std::string s_indent;
152 for (casadi_int i=0;i<indent;++i) {
158 description +=
"Opti " + variable_type_to_string(meta(expr).type) +
" '" + expr.
name() +
159 "' of shape " + expr.
dim();
161 auto it = extra.find(
"stacktrace");
162 if (it!=extra.end()) {
163 for (
const Dict& stacktrace : it->second.as_dict_vector()) {
164 description +=
", " + format_stacktrace(stacktrace, indent+1);
165 if (--max_stacktrace_depth==0)
break;
170 if (parse_opti_name(expr.
name(), vt)) {
171 description +=
"Opti " + variable_type_to_string(vt) +
" '" + expr.
name() +
172 "' of shape " + expr.
dim()+
173 ", belonging to a different instance of Opti.";
182 const Dict& extra = meta_con(expr).
extra;
183 auto it = extra.find(
"stacktrace");
184 if (it!=extra.end()) {
185 for (
const Dict& stacktrace : it->second.as_dict_vector()) {
186 description +=
", " + format_stacktrace(stacktrace, indent+1);
187 if (--max_stacktrace_depth==0)
break;
191 std::vector<MX> s =
symvar(expr);
195 description+=
"General expression, dependent on " +
str(s.size()) +
" symbols:";
196 for (casadi_int i=0;i<s.size();++i) {
217 ", part " +
str((casadi_int) local_i / expr.
numel()) +
".";
226 if (symbol.
numel()>1)
233 casadi_assert_dev(i>=0);
234 casadi_assert_dev(i<
nx());
236 for (
const auto& e :
x) {
240 casadi_error(
"Internal error");
246 casadi_assert_dev(i>=0);
247 casadi_assert_dev(i<
ng());
248 for (
const auto& e : g_) {
249 const MetaCon& m = meta_con(e);
252 casadi_error(
"Internal error");
257 count_(0), count_var_(0), count_par_(0), count_dual_(0) {
260 instance_number_ = instance_count_++;
261 user_callback_ =
nullptr;
267 casadi_assert(problem_type==
"nlp" || problem_type==
"conic",
268 "Specified problem type '" + problem_type +
"'unknown. "
269 "Choose 'nlp' (default) or 'conic'.");
270 problem_type_ = problem_type;
272 helpers_ = std::make_shared<ValueCache>();
286 meta_data.
count = count_++;
287 meta_data.
i = count_var_++;
292 if (attribute==
"symmetric") {
293 casadi_assert(n==m,
"You specified attribute 'symmetric', "
294 "while matrix is not even square, but " +
str(n) +
"-by-" +
str(m) +
".");
295 symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), n*(n+1)/2);
297 }
else if (attribute==
"full") {
298 symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), n, m);
301 casadi_error(
"Unknown attribute '" + attribute +
"'. Choose from 'full' or 'symmetric'.");
305 symbols_.push_back(symbol);
320 meta_data.
n = symbol.
size1();
321 meta_data.
m = symbol.
size2();
323 meta_data.
count = count_++;
324 meta_data.
i = count_var_++;
327 symbols_.push_back(symbol);
345 meta_data.
count = count_++;
346 meta_data.
i = count_var_++;
348 MX symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), sp);
351 symbols_.push_back(symbol);
361 std::string OptiNode::name_prefix()
const {
362 return "opti" +
str(instance_number_) +
"_";
369 void OptiNode::register_dual(
MetaCon& c) {
377 meta_data.
count = count_++;
378 meta_data.
i = count_dual_++;
393 MX flat = vec(symbol);
397 Function
sign = Function(
"sign", {v}, {decide_left_right});
399 ret = MX(ret_sp, sign_map((c.
flipped ? -1 : 1)*flat)[0].T());
401 casadi_int block_size = N / c.
n;
402 std::vector<MX> original_blocks = vertsplit(fabs(flat), block_size);
403 std::vector<MX> blocks(N);
404 for (casadi_int i=0;i<c.
n;++i) {
406 blocks[
p] = original_blocks[i];
408 ret = MX(ret_sp, vertcat(blocks));
412 symbols_.push_back(symbol);
423 casadi_assert_dev(attribute==
"full");
428 meta_data.
n = symbol.
size1();
429 meta_data.
m = symbol.
size2();
431 meta_data.
count = count_++;
432 meta_data.
i = count_par_++;
434 symbols_.push_back(symbol);
442 casadi_assert_dev(attribute==
"full");
450 meta_data.
count = count_++;
451 meta_data.
i = count_par_++;
453 MX symbol =
MX::sym(name_prefix() +
"p_" +
str(count_par_), sp);
454 symbols_.push_back(symbol);
462 casadi_assert_dev(attribute==
"full");
470 meta_data.
count = count_++;
471 meta_data.
i = count_par_++;
473 MX symbol =
MX::sym(name_prefix() +
"p_" +
str(count_par_), n, m);
474 symbols_.push_back(symbol);
483 if (stats_.empty()) {
484 stats_ = solver_.
stats();
486 "detect_simple_bounds_is_simple",
487 std::vector<bool>(
ng(),
false));
489 "detect_simple_bounds_target_x",
490 std::vector<casadi_int>{});
491 casadi_assert_dev(is_simple_.size()==
ng());
493 g_index_reduce_g_.resize(
ng());
494 g_index_reduce_x_.resize(
ng(), -1);
495 g_index_unreduce_g_.resize(
ng(), -1);
497 casadi_int* target_x_ptr = target_x_.data();
500 for (casadi_int i=0;i<is_simple_.size();++i) {
501 if (!is_simple_[i]) {
502 g_index_reduce_g_[i] = k;
503 g_index_unreduce_g_[k] = i;
506 g_index_reduce_g_[i] = -1;
507 g_index_reduce_x_[i] = *target_x_ptr;
512 reduced_ =
any(is_simple_);
519 return g_index_reduce_g_[i];
523 return g_index_unreduce_g_[i];
527 return g_index_reduce_x_[i];
537 if (mystats.find(
"return_status")!=mystats.end()) {
538 std::stringstream ss;
539 ss << mystats.at(
"return_status");
552 bool success =
false;
553 if (mystats.find(
"success")!=mystats.end()) success = mystats.at(
"success");
554 if (!accept_limit)
return success;
556 bool limited =
false;
557 if (mystats.find(
"unified_return_status")!=mystats.end())
558 limited = mystats.at(
"unified_return_status")==
"SOLVER_RET_LIMITED";
559 return success || limited;
567 meta_[m.
get()] = meta;
571 meta_con_[m.
get()] = meta;
578 for (
const auto & it : meta) {
579 m_update.
extra[it.first] = it.second;
580 m_update2.
extra[it.first] = it.second;
587 for (
const auto & it : meta)
588 m_update.
extra[it.first] = it.second;
605 return meta_con(m).
dual;
608 const MetaVar& OptiNode::meta(
const MX& m)
const {
610 auto find = meta_.find(m.
get());
614 const MetaCon& OptiNode::meta_con(
const MX& m)
const {
616 auto find = meta_con_.find(m.get());
620 MetaVar& OptiNode::meta(
const MX& m) {
622 auto find = meta_.find(m.get());
626 MetaCon& OptiNode::meta_con(
const MX& m) {
628 auto find = meta_con_.find(m.get());
640 bool OptiNode::has(
const MX& m)
const {
641 return meta_.find(m.
get())!=meta_.end();
644 bool OptiNode::has_con(
const MX& m)
const {
645 return meta_con_.find(m.get())!=meta_con_.end();
648 void OptiNode::assert_has(
const MX& m)
const {
651 casadi_assert(m.is_symbolic(),
"Symbol expected, got expression.");
652 if (parse_opti_name(m.name(), vt)) {
653 casadi_error(
"Unknown: " +
describe(m));
655 casadi_error(
"Unknown: " +
describe(m) +
"\n"
656 "Note: you cannot use a raw MX.sym in your Opti problem,"
657 " only if you package it in a CasADi Function.");
662 void OptiNode::assert_has_con(
const MX& m)
const {
663 casadi_assert(has_con(m),
"Constraint not present in Opti stack.");
666 casadi_int OptiNode::instance_count_ = 0;
668 bool OptiNode::parse_opti_name(
const std::string& name,
VariableType& vt)
const {
669 casadi_int i = name.find(
"opti");
670 if (i!=0)
return false;
674 if (i==std::string::npos)
return false;
675 if (name.substr(i, 1)==
"x") {
678 }
else if (name.substr(i, 1)==
"p") {
681 }
else if (name.substr(i, 5)==
"lam_g") {
689 std::string OptiNode::variable_type_to_string(
VariableType vt)
const {
690 auto it = VariableType2String_.find(vt);
691 if (it==VariableType2String_.end())
return "unknown variable type";
695 std::map<VariableType, std::string> OptiNode::VariableType2String_ =
702 for (
const auto& e :
symvar()) {
704 ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
711 for (
const auto& e :
symvar()) {
713 ret.push_back(e==store_latest_.at(meta(e).type)[meta(e).i]);
720 for (
const auto& e :
symvar()) {
722 ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
728 casadi_assert(!f_.
is_empty() || !g_.empty(),
729 "You need to specify at least an objective (y calling 'minimize'), "
730 "or a constraint (by calling 'subject_to').");
732 symbol_active_.clear();
733 symbol_active_.resize(symbols_.size());
734 helpers_ = std::make_shared<ValueCache>();
737 MX total_expr = vertcat(f_, veccat(g_));
740 for (
const auto& d :
symvar(total_expr))
741 symbol_active_[meta(d).
count] =
true;
746 casadi_int offset = 0;
747 for (
const auto& v :
x) {
748 meta(v).
start = offset;
750 meta(v).
stop = offset;
756 nlp_[
"x"] = veccat(
x);
757 nlp_[
"p"] = veccat(
p);
759 nlp_unscaled_[
"x"] = veccat(
x);
760 nlp_unscaled_[
"p"] = veccat(
p);
763 for (
const MX& e :
x) {
768 nlp_unscaled_[
"f"] = f_;
771 for (casadi_int i=0;i<g_.size();++i) {
774 symbol_active_[r2.
count] =
true;
786 index_all_to_g_.resize(offset);
788 casadi_int offset_g = 0;
789 for (
const auto&
g : g_) {
792 for (casadi_int i=0;i<r.
stop-r.
start;++i) {
793 index_all_to_g_[r.
start+i] = -1;
796 for (casadi_int i=0;i<r.
stop-r.
start;++i) {
797 index_all_to_g_[r.
start+i] = offset_g+i;
804 for (casadi_int i=0;i<lam.size();++i) meta(lam[i]).
active_i = i;
809 std::vector<MX> g_all, g_unscaled_all;
810 std::vector<MX> h_all, h_unscaled_all;
811 std::vector<MX> lbg_all, lbg_unscaled_all;
812 std::vector<MX> ubg_all, ubg_unscaled_all;
814 g_linear_scale_.clear();
815 h_linear_scale_.clear();
819 for (
const auto&
g : g_) {
821 h_all.push_back(meta_con(
g).canon/meta_con(
g).linear_scale);
822 if (meta_con(
g).canon.numel()==meta_con(
g).linear_scale.numel()) {
823 h_linear_scale.push_back(meta_con(
g).linear_scale);
825 casadi_assert_dev(meta_con(
g).linear_scale.numel()==1);
826 h_linear_scale.push_back(
DM::ones(meta_con(
g).canon.sparsity())*meta_con(
g).linear_scale);
828 h_unscaled_all.push_back(meta_con(
g).canon);
830 g_all.push_back(meta_con(
g).canon/meta_con(
g).linear_scale);
831 if (meta_con(
g).canon.numel()==meta_con(
g).linear_scale.numel()) {
834 casadi_assert_dev(meta_con(
g).linear_scale.numel()==1);
837 g_unscaled_all.push_back(meta_con(
g).canon);
838 lbg_all.push_back(meta_con(
g).lb/meta_con(
g).linear_scale);
839 lbg_unscaled_all.push_back(meta_con(
g).lb);
840 ubg_all.push_back(meta_con(
g).ub/meta_con(
g).linear_scale);
841 ubg_unscaled_all.push_back(meta_con(
g).ub);
843 equality_.insert(equality_.end(),
844 meta_con(
g).canon.numel(),
849 nlp_[
"g"] = veccat(g_all);
851 nlp_unscaled_[
"g"] = veccat(g_unscaled_all);
852 if (problem_type_==
"conic") {
853 nlp_[
"h"] = diagcat(h_all);
854 nlp_unscaled_[
"h"] = diagcat(h_unscaled_all);
855 h_linear_scale_ = veccat(h_linear_scale).nonzeros();
862 linear_scale_ = veccat(linear_scale).nonzeros();
863 linear_scale_offset_ = veccat(linear_scale_offset).nonzeros();
866 std::vector<MX> x_unscaled(
x.
size());
867 for (casadi_int i=0;i<
x.
size();++i) {
868 x_unscaled[i] =
x[i]*linear_scale[i] + linear_scale_offset[i];
872 std::vector<MX> expr = {nlp_[
"f"], nlp_[
"g"]};
873 if (problem_type_==
"conic") expr.push_back(nlp_[
"h"]);
874 std::vector<MX> fgh = substitute(expr,
x, x_unscaled);
877 if (problem_type_==
"conic") {
882 nlp_[
"f"] = nlp_[
"f"]/f_linear_scale_;
886 bounds[
"p"] = nlp_[
"p"];
887 bounds_lbg_ = veccat(lbg_all);
888 bounds_ubg_ = veccat(ubg_all);
889 bounds_unscaled_lbg_ = veccat(lbg_unscaled_all);
890 bounds_unscaled_ubg_ = veccat(ubg_unscaled_all);
892 bounds[
"lbg"] = bounds_lbg_;
893 bounds[
"ubg"] = bounds_ubg_;
895 bounds_ =
Function(
"bounds", bounds, {
"p"}, {
"lbg",
"ubg"});
901 std::string name = h.
name();
907 if (name==
"nlp_jac_g") {
912 args[
"x"] =
arg[
"x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
913 args[
"p"] =
arg[
"p"];
915 for (
const auto & it :
res) {
917 arg[it.first] = it.second*g_linear_scale_inv;
918 }
else if (it.first==
"jac_g_x") {
919 arg[it.first] = mtimes(
920 mtimes(diag(g_linear_scale_inv), it.second),
921 diag(x_linear_scale_mx));
923 casadi_error(
"Unknown output '" + it.first +
"'. Expecting g, jac_g_x.");
928 }
else if (name==
"nlp_hess_l") {
935 args[
"x"] =
arg[
"x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
936 args[
"p"] =
arg[
"p"];
937 args[
"lam_f"] =
arg[
"lam_f"]/f_linear_scale_mx;
938 args[
"lam_g"] =
arg[
"lam_g"]/g_linear_scale_mx;
940 for (
const auto & it :
res) {
941 if (it.first==
"triu_hess_gamma_x_x" ||
942 it.first==
"hess_gamma_x_x" ||
943 (it.second.size1()==
nx() && it.second.is_square())) {
944 MX D = diag(x_linear_scale_mx);
945 arg[it.first] = mtimes(mtimes(
D, it.second),
D);
947 casadi_error(
"Unknown output '" + it.first +
"'. Expecting triu_hess_gamma_x_x");
953 casadi_error(
"Unknown helper function '" + name +
"'");
958 const Dict& solver_options) {
959 solver_name_ = solver_name;
960 solver_options_ = plugin_options;
961 if (!solver_options.empty())
962 solver_options_[solver_name] = solver_options;
966 std::vector<MX> OptiNode::sort(
const std::vector<MX>& v)
const {
970 std::map<casadi_int, MX> unordered;
971 for (
const auto& d : v)
972 unordered[meta(d).
count] = d;
976 for (
auto const &e : unordered)
977 ret.push_back(e.second);
989 std::vector<MX> OptiNode::ineq_unchain(
const MX& a,
bool& flipped) {
996 casadi_assert_dev(!left || !right);
999 return {a.
dep(0), a.
dep(1)};
1003 std::vector<MX> ret = {a.
dep(!ineq)};
1006 casadi_assert_dev(!e.is_op(
OP_EQ));
1007 casadi_assert_dev(!e.dep(!ineq).is_op(
OP_LE) && !e.dep(!ineq).is_op(
OP_LT));
1008 ret.push_back(e.dep(!ineq));
1012 if (left) std::reverse(ret.begin(), ret.end());
1018 void OptiNode::assert_only_opti_symbols(
const MX& e)
const {
1020 for (
const auto& s : symbols) assert_has(s);
1023 void OptiNode::assert_only_opti_nondual(
const MX& e)
const {
1025 for (
const auto& s : symbols) {
1027 casadi_assert(meta(s).type!=
OPTI_DUAL_G,
"Dual variables forbidden in this context.");
1041 "Linear scale must have the same size as the expression. "
1042 "You got linear_scale " + con.
linear_scale.
dim() +
" while " + expr.
dim() +
" is expected.");
1046 std::vector<MX> ret;
1048 std::vector<MX> args = ineq_unchain(c, flipped);
1049 std::vector<bool> parametric;
1050 for (
auto &a : args) parametric.push_back(
is_parametric(a));
1052 if (args.size()==2 && (parametric[0] || parametric[1])) {
1054 MX e = args[0]-args[1];
1056 casadi_assert(!parametric[0] || !parametric[1],
1057 "Constraint must contain decision variables.");
1058 if (problem_type_==
"conic") {
1060 args[0] = -soc(args[0].dep(), args[1]);
1065 if (parametric[0]) {
1078 }
else if (args.size()==3 && parametric[0] && parametric[2]) {
1081 con.
lb = args[0]*
DM::ones(args[1].sparsity());
1082 con.
ub = args[2]*
DM::ones(args[1].sparsity());
1089 bool type_known =
false;
1090 for (casadi_int j=0;j<args.size()-1;++j) {
1091 MX e = args[j]-args[j+1];
1092 if (problem_type_==
"conic") {
1094 args[j] = -soc(args[j].dep(), args[j+1]);
1096 e = args[j]-args[j+1];
1113 "Matrix inequalities must be square. Did you mean element-wise inequality instead?");
1126 con.
canon = veccat(ret);
1134 con.
canon = diagcat(ret);
1140 "Constraint must contain decision variables.");
1147 "Constraint shape mismatch.");
1153 "Constraint shape mismatch.");
1171 "This action is forbidden since you have not solved the Opti stack yet "
1172 "(with calling 'solve').");
1177 "This action is forbidden since you have not baked the Opti stack yet "
1178 "(with calling 'solve').");
1182 casadi_assert_dev(g_.empty());
1187 assert_only_opti_nondual(
f);
1189 casadi_assert(
f.
is_scalar(),
"Objective must be scalar, got " +
f.
dim() +
".");
1191 f_linear_scale_ = linear_scale;
1195 assert_only_opti_nondual(
g);
1199 casadi_assert(!
g.
is_empty(),
"You passed an empty expression to `subject_to`. "
1200 "Make sure the number of rows and columns is non-zero. "
1201 "Got " +
g.
dim(
true) +
".");
1202 casadi_assert(
g.
nnz()>0,
"You passed a fully sparse expression to `subject_to`. "
1203 "Make sure the expression has at least one nonzero. "
1204 "Got " +
g.
dim(
true) +
".");
1205 casadi_assert(!
g.
is_constant(),
"You passed a constant to `subject_to`. "
1206 "You need a symbol to form a constraint.");
1210 register_dual(meta_con(
g));
1212 for (
auto && it : options) {
1213 if (it.first==
"stacktrace") {
1214 meta_con(
g).
extra[
"stacktrace"] = it.second.to_dict_vector();
1215 meta(meta_con(
g).dual_canon).
extra[
"stacktrace"] = it.second.to_dict_vector();
1216 }
else if (it.first==
"meta") {
1219 casadi_error(
"Unknown option: " + it.first);
1233 std::vector<MX> ret;
1234 for (
const auto& d :
symvar(expr)) {
1235 if (meta(d).type==type) ret.push_back(d);
1242 const std::vector<double> & x_v =
res.at(
"x").nonzeros();
1244 casadi_int i = meta(v).
i;
1245 std::vector<double> & data_v = store_latest_[
OPTI_VAR][i].nonzeros();
1246 for (casadi_int i=0;i<data_v.size();++i) {
1247 casadi_int j = meta(v).
start+i;
1248 data_v[i] = x_v[j]*linear_scale_[j] + linear_scale_offset_[j];
1251 if (
res.find(
"lam_g")!=
res.end()) {
1252 const std::vector<double> & lam_v =
res.at(
"lam_g").nonzeros();
1254 casadi_int i = meta(v).
i;
1255 std::vector<double> & data_v = store_latest_[
OPTI_DUAL_G][i].nonzeros();
1256 for (casadi_int i=0;i<data_v.size();++i) {
1257 casadi_int j = meta(v).
start+i;
1258 j = index_all_to_g_.at(j);
1260 data_v[i] = lam_v.at(j)/g_linear_scale_.at(j)*f_linear_scale_;
1268 bool OptiNode::old_callback()
const {
1269 if (callback_.
is_null())
return false;
1271 return !cb->associated_with(
this);
1280 for (
const auto&
g : g_) {
1281 if (problem_type_!=
"conic") {
1283 casadi_error(
"Psd constraints not implemented yet. "
1284 "Perhaps you intended an element-wise inequality? "
1285 "In that case, make sure that the matrix is flattened (e.g. mat(:)).");
1289 Dict solver_options_all = solver_options_;
1291 if (solver_options_all.find(
"equality")==solver_options_all.end()) {
1292 solver_options_all[
"equality"] = equality_;
1295 if (solver_options_all.find(
"discrete")==solver_options_all.end()) {
1296 solver_options_all[
"discrete"] = discrete_;
1299 Dict opts = solver_options_all;
1302 if (callback && user_callback_) {
1304 opts[
"iteration_callback"] = callback_;
1307 casadi_assert(!solver_name_.empty(),
1308 "You must call 'solver' on the Opti stack to select a solver. "
1309 "Suggestion: opti.solver('ipopt')");
1311 if (problem_type_==
"conic") {
1312 return qpsol(
"solver", solver_name_, nlp_, opts);
1314 return nlpsol(
"solver", solver_name_, nlp_, opts);
1326 bool solver_update =
solver_dirty() || old_callback() || (user_callback_ && callback_.
is_null());
1328 if (solver_update) {
1339 "Solver failed. You may use opti.debug.value to investigate the latest values of variables."
1340 " return_status is '" + ret +
"'");
1350 for (
const auto&
g : g_) {
1352 casadi_error(
"Constraint type unknown. Use ==, >= or <= .");
1355 if (user_callback_) {
1367 for (casadi_int i=0;i<s.size();++i) {
1369 "You have forgotten to assign a value to a parameter ('set_value'), "
1370 "or have set it to NaN/Inf:\n" +
describe(s[i], 1));
1377 arg[
"p"] = arg_[
"p"];
1379 arg_[
"lbg"] =
res[
"lbg"];
1380 arg_[
"ubg"] =
res[
"ubg"];
1386 return solver_(
arg);
1389 bool override_num(
const std::map<casadi_int, MX> & temp, std::vector<DM>& num, casadi_int i) {
1391 auto it = temp.find(i);
1392 if (it==temp.end()) {
1396 DM t =
static_cast<DM>(it->second);
1405 std::shared_ptr<ValueHelper> vh;
1406 if (!helpers_->incache(expr, vh)) {
1407 vh = std::make_shared<ValueHelper>();
1414 vh->helper =
Function(
"helper", std::vector<MX>{veccat(
x), veccat(
p), veccat(lam)}, {expr});
1415 if (vh->helper.has_free())
1416 casadi_error(
"This expression has symbols that are not defined "
1417 "within Opti using variable/parameter.");
1418 helpers_->tocache_if_missing(expr, vh);
1421 const std::vector<MX>&
x = vh->x;
1422 const std::vector<MX>&
p = vh->p;
1423 const std::vector<MX>& lam = vh->lam;
1424 const Function& helper = vh->helper;
1426 std::map<VariableType, std::map<casadi_int, MX> > temp;
1428 for (
const auto& v : values) {
1429 casadi_assert_dev(v.is_op(
OP_EQ));
1430 casadi_int i = meta(v.dep(1)).
i;
1431 casadi_assert_dev(v.dep(0).is_constant());
1432 temp[meta(v.dep(1)).
type][i] = v.dep(0);
1435 bool undecided_vars =
false;
1436 std::vector<DM> x_num;
1437 for (
const auto& e :
x) {
1438 casadi_int i = meta(e).
i;
1439 x_num.push_back(store_latest_.at(
OPTI_VAR).at(i));
1442 x_num.back() = x_num.back()/store_linear_scale_.at(
OPTI_VAR)[meta(e).
i] -
1443 store_linear_scale_offset_.at(
OPTI_VAR)[meta(e).
i];
1447 std::vector<DM> lam_num;
1448 for (
const auto& e : lam) {
1449 casadi_int i = meta(e).
i;
1450 casadi_assert(i<store_latest_.at(
OPTI_DUAL_G).size(),
1451 "This expression has a dual for a constraint that is not given to Opti:\n" +
1453 lam_num.push_back(store_latest_.at(
OPTI_DUAL_G).at(i));
1457 std::vector<DM> p_num;
1458 for (
const auto& e :
p) {
1459 casadi_int i = meta(e).
i;
1460 p_num.push_back(store_initial_.at(
OPTI_PAR).at(i));
1462 casadi_assert(p_num.back().is_regular(),
1463 "This expression depends on a parameter with unset value:\n"+
1467 if (undecided_vars) {
1469 for (
const auto& e :
x)
1470 casadi_assert(symbol_active_[meta(e).count],
1471 "This expression has symbols that do not appear in the constraints and objective:\n" +
1473 for (
const auto& e : lam)
1474 casadi_assert(symbol_active_[meta(e).count],
1475 "This expression has a dual for a constraint that is not given to Opti:\n" +
1479 std::vector<DM>
arg = helper(std::vector<DM>{veccat(x_num), veccat(p_num), veccat(lam_num)});
1486 casadi_assert(symbol_active_[meta(m).count],
"Opti symbol is not used in Solver."
1487 " It does not make sense to assign a value to it:\n" +
describe(m, 1));
1491 for (
const auto& v : assignments) {
1492 casadi_assert_dev(v.is_op(
OP_EQ));
1493 casadi_assert_dev(v.dep(0).is_constant());
1502 casadi_assert(
x.
is_valid_input(),
"First argument to set_domain should be a variable.");
1504 if (domain==
"real") {
1506 }
else if (domain==
"integer") {
1509 casadi_error(
"Unknown domain '" + domain +
"'. Known values are 'real', 'integer'.");
1518 for (
const auto& v : assignments) {
1519 casadi_assert_dev(v.is_op(
OP_EQ));
1520 casadi_assert_dev(v.dep(0).is_constant());
1526 void OptiNode::set_value_internal(
const MX& x,
const DM& v,
1531 DM& target = store[meta(
x).
type][meta(
x).
i];
1539 MX symbols_cat = veccat(symbols);
1541 std::string failmessage =
"You cannot set initial/value of an arbitrary expression. "
1542 "Use symbols or simple mappings of symbols.";
1545 for (
bool b : which_depends(
x, symbols_cat, 2,
false)) casadi_assert(!b, failmessage);
1548 Dict opts = {{
"compact",
true}};
1549 Function Jf(
"Jf", std::vector<MX>{}, std::vector<MX>{jacobian(
x, veccat(symbols), opts)});
1550 DM J = Jf(std::vector<DM>{})[0];
1553 Function Ff(
"Ff", symbols, {
x});
1554 DM E = Ff(std::vector<DM>(symbols.size(), 0))[0];
1555 std::vector<double>& e = E.
nonzeros();
1563 std::vector<casadi_int> filled_rows = sum2(J).get_row();
1564 J = J(filled_rows,
all);
1567 std::vector<casadi_int> row, col;
1568 J.sparsity().get_triplet(row, col);
1569 const std::vector<double>& scaling = J.nonzeros();
1570 const std::vector<double>& data_original =
value.
nonzeros();
1572 std::vector<double> data; data.reserve(
value.
nnz());
1573 for (casadi_int i=0;i<
value.
nnz();++i) {
1574 double v = data_original[i];
1575 casadi_int nz = sp_JT.
colind()[i+1]-sp_JT.colind()[i];
1576 casadi_assert(nz<=1, failmessage);
1580 casadi_assert(v==e[i],
"In initial/value assignment: "
1581 "inconsistent numerical values. At nonzero " +
str(i) +
", lhs has "
1582 +
str(e[i]) +
", while rhs has " +
str(v) +
".");
1587 std::vector<double> temp(symbols_cat.nnz(),
casadi::nan);
1588 for (casadi_int k=0;k<data.size();++k) {
1589 double& lhs = temp[col[k]];
1590 double rhs = data[row[k]]/scaling[row[k]];
1591 if (std::isnan(lhs)) {
1595 casadi_assert(lhs==rhs,
"Initial/value assignment with mapping is ambiguous.");
1599 casadi_int offset = 0;
1600 for (
const auto & s : symbols) {
1601 DM& target = store[meta(s).
type][meta(s).
i];
1602 std::vector<double>& data = target.
nonzeros();
1604 for (casadi_int i=0;i<s.nnz();++i) {
1606 double v = temp[offset+i];
1607 if (!std::isnan(v)) data[i] = v;
1616 casadi_assert(meta(s).type!=
OPTI_PAR,
1617 "You cannot set an initial value for a parameter. Did you mean 'set_value'?");
1618 set_value_internal(
x, v, store_initial_);
1623 casadi_assert(meta(s).type!=
OPTI_VAR,
1624 "You cannot set a value for a variable. Did you mean 'set_initial'?");
1625 set_value_internal(
x, v, store_initial_);
1630 casadi_assert(meta(s).type!=
OPTI_PAR,
1631 "You cannot set a scale value for a parameter.");
1633 "Dimension mismatch in linear_scale. Expected " +
x.
dim() +
", got " + scale.
dim()+
".");
1634 set_value_internal(
x, scale, store_linear_scale_);
1636 "Dimension mismatch in linear_scale offset. Expected " +
x.
dim() +
1637 ", got " + scale.
dim()+
".");
1638 set_value_internal(
x, offset, store_linear_scale_offset_);
1642 if (symbol_active_.empty())
return std::vector<MX>{};
1643 std::vector<MX> ret;
1644 for (
const auto& s : symbols_) {
1645 if (symbol_active_[meta(s).count] && meta(s).type==type)
1652 if (symbols_.empty())
return std::vector<MX>{};
1653 std::vector<MX> ret;
1654 for (
const auto& s : symbols_) {
1655 if (meta(s).type==type)
1666 const std::map<
VariableType, std::vector<DM> >& store)
const {
1667 if (symbol_active_.empty())
return std::vector<DM>{};
1668 std::vector<DM> ret;
1669 for (
const auto& s : symbols_) {
1670 if (symbol_active_[meta(s).count] && meta(s).type==type) {
1671 ret.push_back(store.at(meta(s).type)[meta(s).i]);
1678 const std::vector<MX>& args,
const std::vector<MX>& res,
1679 const std::vector<std::string>& name_in,
1680 const std::vector<std::string>& name_out,
1687 std::vector<MX> x0,
p,
lam_g;
1693 for (
const auto& a : args) {
1694 casadi_assert(a.
is_valid_input(),
"Argument " +
str(k) +
" is not purely symbolic.");
1697 if (!symbol_active_[meta(prim).count])
continue;
1698 casadi_int i = meta(prim).
active_i;
1701 }
else if (meta(prim).type==
OPTI_PAR) {
1706 casadi_error(
"Unknown");
1711 arg[
"p"] = veccat(
p);
1715 arg[
"x0"] = veccat(x0);
1717 arg[
"lbg"] = r[
"lbg"];
1718 arg[
"ubg"] = r[
"ubg"];
1727 std::vector<MX> arg_in = helper(std::vector<MX>{r.at(
"x"),
arg[
"p"], r.at(
"lam_g")});
1729 return Function(name, args, arg_in, name_in, name_out, opts);
1738 return instance_number_;
1745 std::vector<double> g_scaled_ =
value(nlp_.at(
"g"), std::vector<MX>(),
true).
get_elements();
1746 std::vector<double> lbg_scaled_ =
value(bounds_lbg_, std::vector<MX>(),
true).
get_elements();
1747 std::vector<double> ubg_scaled_ =
value(bounds_ubg_, std::vector<MX>(),
true).
get_elements();
1751 uout() <<
"Violated constraints (tol " << tol <<
"), in order of declaration:" << std::endl;
1753 for (casadi_int i=0;i<g_.size();++i) {
1754 double err = std::max(g_[i]-ubg_[i], lbg_[i]-g_[i]);
1755 double err_scaled = std::max(g_scaled_[i]-ubg_scaled_[i], lbg_scaled_[i]-g_scaled_[i]);
1758 uout() <<
"/" << g_.size();
1761 if (is_simple_[i]) {
1763 uout() <<
" reduced to bound on x[" << g_index_reduce_x_.at(i) <<
"]";
1765 uout() <<
" reduced to bound on x(" << g_index_reduce_x_.at(i)+1 <<
")";
1768 uout() <<
" reduced to g[" << g_index_reduce_g_.at(i) <<
"]";
1772 uout() <<
" ------ " << std::endl;
1773 uout() << lbg_[i] <<
" <= " << g_[i] <<
" <= " << ubg_[i];
1774 uout() <<
" (viol " << err <<
")" << std::endl;
1775 if (g_[i]!=g_scaled_[i]) {
1776 uout() << lbg_scaled_[i] <<
" <= " << g_scaled_[i] <<
" <= " << ubg_scaled_[i];
1777 uout() <<
" (scaled) (viol " << err_scaled <<
")" << std::endl;
static std::unique_ptr< std::istream > ifstream_ptr(const std::string &path, std::ios_base::openmode mode=std::ios_base::in, bool fail=true)
FunctionInternal(const std::string &name)
Constructor.
FunctionInternal * get() const
const std::vector< std::string > & name_in() const
Get input scheme.
const std::string & name() const
Name of the function.
static Function create(FunctionInternal *node)
Create from node.
Dict stats(int mem=0) const
Get all statistics obtained at the end of the last evaluate call.
const std::vector< std::string > & name_out() const
Get output scheme.
casadi_int numel() const
Get the number of elements.
bool is_dense() const
Check if the matrix expression is dense.
bool is_empty(bool both=false) const
Check if the sparsity is empty, i.e. if one of the dimensions is zero.
std::pair< casadi_int, casadi_int > size() const
Get the shape.
bool is_vector() const
Check if the matrix is a row or column vector.
casadi_int nnz() const
Get the number of (structural) non-zero elements.
casadi_int size2() const
Get the second dimension (i.e. number of columns)
casadi_int size1() const
Get the first dimension (i.e. number of rows)
std::string dim(bool with_nz=false) const
Get string representation of dimensions.
static MatType ones(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries one.
static MX sym(const std::string &name, casadi_int nrow=1, casadi_int ncol=1)
Create an nrow-by-ncol symbolic primitive.
const casadi_int * colind() const
Get the sparsity pattern. See the Sparsity class for details.
static MatType zeros(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries zero.
bool is_scalar(bool scalar_and_dense=false) const
Check if the matrix expression is scalar.
bool is_null() const
Is a null pointer?
static casadi_int start_index
bool is_valid_input() const
Check if matrix can be used to define function inputs.
const Sparsity & sparsity() const
Get the sparsity pattern.
std::string name() const
Get the name.
static std::vector< MX > symvar(const MX &x)
bool is_constant() const
Check if constant.
static MX eye(casadi_int n)
Identity matrix.
MXNode * get() const
Get a const pointer to the node.
bool is_op(casadi_int op) const
Is it a certain operation.
std::vector< MX > primitives() const
Get primitives.
MX dep(casadi_int ch=0) const
Get the nth dependency as MX.
bool is_symbolic() const
Check if symbolic.
std::vector< Scalar > & nonzeros()
Matrix< Scalar > T() const
Transpose the matrix.
const Sparsity & sparsity() const
Const access the sparsity - reference to data member.
void set(const Matrix< Scalar > &m, bool ind1, const Slice &rr)
static Matrix< double > nan(const Sparsity &sp)
create a matrix with all nan
bool is_regular() const
Checks if expression does not contain NaN or Inf.
std::vector< Scalar > get_elements() const
Get all elements.
MX x_lookup(casadi_index i) const
std::string describe(const MX &x, casadi_index indent=0, const Dict &opts=Dict()) const
MX g_lookup(casadi_index i) const
std::string x_describe(casadi_index i, const Dict &opts=Dict()) const
std::string g_describe(casadi_index i, const Dict &opts=Dict()) const
A simplified interface for NLP modeling/solving.
Function solver_construct(bool callback=true)
Dict user_dict(const MX &m) const
casadi_int g_index_reduce_x(casadi_int i) const
std::vector< MX > value_variables() const
get assignment expressions for latest values
std::vector< MX > initial() const
get assignment expressions for initial values
std::vector< MX > active_symvar(VariableType type) const
std::string describe(const MX &x, casadi_int indent=0, const Dict &opts=Dict()) const
MX g() const
Get all (scalarised) constraint expressions as a column vector.
MetaCon get_meta_con(const MX &m) const
Get meta-data of symbol (for internal use only)
OptiAdvanced baked_copy() const
DM g_linear_scale() const
MX dual(const MX &m) const
get the dual variable
std::vector< DM > active_values(VariableType type) const
MX x() const
Get all (scalarised) decision variables as a symbolic column vector.
void minimize(const MX &f, double linear_scale=1)
Set objective.
casadi_int nx() const
Number of (scalarised) decision variables.
MX variable(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a decision variable (symbol)
std::string x_describe(casadi_int i, const Dict &opts=Dict()) const
void bake()
Fix the structure of the optimization problem.
casadi_int instance_number() const
DM value(const MX &x, const std::vector< MX > &values=std::vector< MX >(), bool scaled=false) const
void disp(std::ostream &stream, bool more=false) const override
Print representation.
void set_domain(const MX &x, const std::string &domain)
Set domain of variable.
OptiSol solve(bool accept_limit)
Crunch the numbers; solve the problem.
bool solver_dirty() const
casadi_int np() const
Number of (scalarised) parameters.
bool has_callback_class() const
MX g_lookup(casadi_int i) const
casadi_int ng() const
Number of (scalarised) constraints.
MetaVar get_meta(const MX &m) const
Get meta-data of symbol (for internal use only)
void subject_to()
Clear constraints.
void assert_baked() const
std::vector< MX > symvar() const
void update_user_dict(const MX &m, const Dict &meta)
add meta-data of an expression
DM x_linear_scale() const
void assert_solved() const
void set_meta(const MX &m, const MetaVar &meta)
Set meta-data of an expression.
MX parameter(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a parameter (symbol); fixed during optimization.
bool is_parametric(const MX &expr) const
return true if expression is only dependent on Opti parameters, not variables
void assert_active_symbol(const MX &m) const
bool return_success(bool accept_limit) const
Did the solver return successfully?
std::vector< MX > value_parameters() const
bool problem_dirty() const
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const std::vector< std::string > &name_in, const std::vector< std::string > &name_out, const Dict &opts)
Create a CasADi Function from the Opti solver.
DM x_linear_scale_offset() const
MX x_lookup(casadi_int i) const
casadi_int g_index_reduce_g(casadi_int i) const
void mark_problem_dirty(bool flag=true)
OptiNode(const std::string &problem_type)
Create Opti Context.
std::string return_status() const
Get return status of solver.
void assert_empty() const
void set_initial(const MX &x, const DM &v)
void mark_solver_dirty(bool flag=true)
MX p() const
Get all (scalarised) parameters as a symbolic column vector.
casadi_int g_index_unreduce_g(casadi_int i) const
void set_value(const MX &x, const DM &v)
Set value of parameter.
double f_linear_scale() const
MX lam_g() const
Get dual variables as a symbolic column vector.
void set_linear_scale(const MX &x, const DM &scale, const DM &offset)
Set scale of a decision variable.
std::string g_describe(casadi_int i, const Dict &opts=Dict()) const
friend class InternalOptiCallback
void solver(const std::string &solver, const Dict &plugin_options=Dict(), const Dict &solver_options=Dict())
Solver.
MX f() const
Get objective expression.
void mark_solved(bool flag=true)
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Function casadi_solver() const
Get the underlying CasADi solver of the Opti stack.
static OptiNode * create(const std::string &problem_type)
MetaCon canon_expr(const MX &expr, const DM &linear_scale=1) const
Interpret an expression (for internal use only)
void show_infeasibilities(double tol=0, const Dict &opts=Dict()) const
DMDict solve_actual(const DMDict &args)
Dict stats() const
Get statistics.
void set_meta_con(const MX &m, const MetaCon &meta)
Set meta-data of an expression.
A simplified interface for NLP modeling/solving.
A simplified interface for NLP modeling/solving.
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const Dict &opts=Dict())
Create a CasADi Function from the Opti solver.
static Opti create(OptiNode *node)
void clear_mem()
Clear all memory (called from destructor)
Class representing a Slice.
casadi_int size1() const
Get the number of rows.
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern.
casadi_int nnz() const
Get the number of (structural) non-zeros.
casadi_int size2() const
Get the number of columns.
static Sparsity lower(casadi_int n)
Create a lower triangular square sparsity pattern.
Function qpsol(const std::string &name, const std::string &solver, const SXDict &qp, const Dict &opts)
Function nlpsol(const std::string &name, const std::string &solver, const SXDict &nlp, const Dict &opts)
casadi_int nlpsol_n_out()
Number of NLP solver outputs.
std::vector< std::string > nlpsol_out()
Get NLP solver output scheme of NLP solvers.
std::map< std::string, MX > MXDict
bool override_num(const std::map< casadi_int, MX > &temp, std::vector< DM > &num, casadi_int i)
T get_from_dict(const std::map< std::string, T > &d, const std::string &key, const T &default_value)
std::string description(Category v)
double if_else_zero(double x, double y)
Conditional assignment.
void assign_vector(const std::vector< S > &s, std::vector< D > &d)
double sign(double x)
Sign function, note that sign(nan) == nan.
std::vector< casadi_int > find(const std::vector< T > &v)
find nonzeros
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
bool any(const std::vector< bool > &v)
Check if any arguments are true.
@ OPTI_GENERIC_INEQUALITY
const double nan
Not a number.
bool all(const std::vector< bool > &v)
Check if all arguments are true.
bool is_regular(const std::vector< T > &v)
Checks if array does not contain NaN or Inf.
std::map< std::string, DM > DMDict
std::string filename(const std::string &path)