25 #include "optistack_internal.hpp"
28 #include "function_internal.hpp"
29 #include "global_options.hpp"
33 class InternalOptiCallback :
public FunctionInternal {
36 InternalOptiCallback(OptiNode& sol) :
FunctionInternal(class_name()), sol_(sol) {}
38 ~InternalOptiCallback()
override {
43 std::string class_name()
const override {
return "InternalOptiCallback";}
48 Sparsity get_sparsity_in(casadi_int i)
override {
53 }
else if (n==
"lam_x" || n==
"x") {
55 }
else if (n==
"lam_g" || n==
"g") {
57 }
else if (n==
"p" || n==
"lam_p") {
69 bool has_eval_dm()
const override {
return true;}
72 std::vector<DM> eval_dm(
const std::vector<DM>& arg)
const override {
81 if (sol_.user_callback_) sol_.user_callback_->call(i);
87 bool associated_with(
const OptiNode* o) {
return &sol_==o; }
100 user_callback_ = callback;
104 user_callback_ =
nullptr;
108 return user_callback_ != 0;
111 std::string OptiNode::format_stacktrace(
const Dict& stacktrace, casadi_int indent) {
112 std::string s_indent;
113 for (casadi_int i=0;i<indent;++i) {
117 std::string
filename = stacktrace.at(
"file").as_string();
118 casadi_int line = stacktrace.at(
"line").as_int();
120 std::string name = stacktrace.at(
"name").as_string();
121 if (name!=
"Unknown" && name!=
"<module>")
122 description +=
" in " + stacktrace.at(
"name").as_string();
125 for (casadi_int i=0; i<line-1; ++i) {
126 file.ignore(std::numeric_limits<std::streamsize>::max(),
'\n');
128 std::string contents; std::getline(file, contents);
129 auto it = contents.find_first_not_of(
" \n");
130 if (it!=std::string::npos) {
131 description +=
"\n" + s_indent + contents.substr(it);
141 casadi_int max_stacktrace_depth = 1;
142 for (
const auto& op : opts) {
143 if (op.first==
"max_stacktrace_depth") {
144 max_stacktrace_depth = op.second.as_int();
146 casadi_warning(
"Unknown option '" + op.first +
"'");
149 std::string s_indent;
150 for (casadi_int i=0;i<indent;++i) {
156 description +=
"Opti " + variable_type_to_string(meta(expr).type) +
" '" + expr.
name() +
157 "' of shape " + expr.
dim();
159 auto it = extra.find(
"stacktrace");
160 if (it!=extra.end()) {
161 for (
const Dict& stacktrace : it->second.as_dict_vector()) {
162 description +=
", " + format_stacktrace(stacktrace, indent+1);
163 if (--max_stacktrace_depth==0)
break;
168 if (parse_opti_name(expr.
name(), vt)) {
169 description +=
"Opti " + variable_type_to_string(vt) +
" '" + expr.
name() +
170 "' of shape " + expr.
dim()+
171 ", belonging to a different instance of Opti.";
180 const Dict& extra = meta_con(expr).
extra;
181 auto it = extra.find(
"stacktrace");
182 if (it!=extra.end()) {
183 for (
const Dict& stacktrace : it->second.as_dict_vector()) {
184 description +=
", " + format_stacktrace(stacktrace, indent+1);
185 if (--max_stacktrace_depth==0)
break;
189 std::vector<MX> s =
symvar(expr);
193 description+=
"General expression, dependent on " +
str(s.size()) +
" symbols:";
194 for (casadi_int i=0;i<s.size();++i) {
215 ", part " +
str((casadi_int) local_i / expr.
numel()) +
".";
224 if (symbol.
numel()>1)
231 casadi_assert_dev(i>=0);
232 casadi_assert_dev(i<
nx());
234 for (
const auto& e :
x) {
238 casadi_error(
"Internal error");
244 casadi_assert_dev(i>=0);
245 casadi_assert_dev(i<
ng());
246 for (
const auto& e : g_) {
247 const MetaCon& m = meta_con(e);
250 casadi_error(
"Internal error");
255 count_(0), count_var_(0), count_par_(0), count_dual_(0) {
258 instance_number_ = instance_count_++;
259 user_callback_ =
nullptr;
265 casadi_assert(problem_type==
"nlp" || problem_type==
"conic",
266 "Specified problem type '" + problem_type +
"'unknown. "
267 "Choose 'nlp' (default) or 'conic'.");
268 problem_type_ = problem_type;
283 meta_data.
count = count_++;
284 meta_data.
i = count_var_++;
289 if (attribute==
"symmetric") {
290 casadi_assert(n==m,
"You specified attribute 'symmetric', "
291 "while matrix is not even square, but " +
str(n) +
"-by-" +
str(m) +
".");
292 symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), n*(n+1)/2);
294 }
else if (attribute==
"full") {
295 symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), n, m);
298 casadi_error(
"Unknown attribute '" + attribute +
"'. Choose from 'full' or 'symmetric'.");
302 symbols_.push_back(symbol);
317 meta_data.
n = symbol.
size1();
318 meta_data.
m = symbol.
size2();
320 meta_data.
count = count_++;
321 meta_data.
i = count_var_++;
324 symbols_.push_back(symbol);
342 meta_data.
count = count_++;
343 meta_data.
i = count_var_++;
345 MX symbol =
MX::sym(name_prefix() +
"x_" +
str(count_var_), sp);
348 symbols_.push_back(symbol);
358 std::string OptiNode::name_prefix()
const {
359 return "opti" +
str(instance_number_) +
"_";
366 void OptiNode::register_dual(
MetaCon& c) {
374 meta_data.
count = count_++;
375 meta_data.
i = count_dual_++;
390 MX flat = vec(symbol);
394 Function
sign = Function(
"sign", {v}, {decide_left_right});
396 ret = MX(ret_sp, sign_map((c.
flipped ? -1 : 1)*flat)[0].T());
398 casadi_int block_size = N / c.
n;
399 std::vector<MX> original_blocks = vertsplit(fabs(flat), block_size);
400 std::vector<MX> blocks(N);
401 for (casadi_int i=0;i<c.
n;++i) {
403 blocks[
p] = original_blocks[i];
405 ret = MX(ret_sp, vertcat(blocks));
409 symbols_.push_back(symbol);
420 casadi_assert_dev(attribute==
"full");
425 meta_data.
n = symbol.
size1();
426 meta_data.
m = symbol.
size2();
428 meta_data.
count = count_++;
429 meta_data.
i = count_par_++;
431 symbols_.push_back(symbol);
439 casadi_assert_dev(attribute==
"full");
447 meta_data.
count = count_++;
448 meta_data.
i = count_par_++;
450 MX symbol =
MX::sym(name_prefix() +
"p_" +
str(count_par_), sp);
451 symbols_.push_back(symbol);
459 casadi_assert_dev(attribute==
"full");
467 meta_data.
count = count_++;
468 meta_data.
i = count_par_++;
470 MX symbol =
MX::sym(name_prefix() +
"p_" +
str(count_par_), n, m);
471 symbols_.push_back(symbol);
480 if (stats_.empty()) {
481 stats_ = solver_.
stats();
483 "detect_simple_bounds_is_simple",
484 std::vector<bool>(
ng(),
false));
486 "detect_simple_bounds_target_x",
487 std::vector<casadi_int>{});
488 casadi_assert_dev(is_simple_.size()==
ng());
490 g_index_reduce_g_.resize(
ng());
491 g_index_reduce_x_.resize(
ng(), -1);
492 g_index_unreduce_g_.resize(
ng(), -1);
494 casadi_int* target_x_ptr = target_x_.data();
497 for (casadi_int i=0;i<is_simple_.size();++i) {
498 if (!is_simple_[i]) {
499 g_index_reduce_g_[i] = k;
500 g_index_unreduce_g_[k] = i;
503 g_index_reduce_g_[i] = -1;
504 g_index_reduce_x_[i] = *target_x_ptr;
509 reduced_ =
any(is_simple_);
516 return g_index_reduce_g_[i];
520 return g_index_unreduce_g_[i];
524 return g_index_reduce_x_[i];
534 if (mystats.find(
"return_status")!=mystats.end()) {
535 std::stringstream ss;
536 ss << mystats.at(
"return_status");
549 bool success =
false;
550 if (mystats.find(
"success")!=mystats.end()) success = mystats.at(
"success");
551 if (!accept_limit)
return success;
553 bool limited =
false;
554 if (mystats.find(
"unified_return_status")!=mystats.end())
555 limited = mystats.at(
"unified_return_status")==
"SOLVER_RET_LIMITED";
556 return success || limited;
564 meta_[m.
get()] = meta;
568 meta_con_[m.
get()] = meta;
575 for (
const auto & it : meta) {
576 m_update.
extra[it.first] = it.second;
577 m_update2.
extra[it.first] = it.second;
584 for (
const auto & it : meta)
585 m_update.
extra[it.first] = it.second;
602 return meta_con(m).
dual;
605 const MetaVar& OptiNode::meta(
const MX& m)
const {
607 auto find = meta_.find(m.
get());
611 const MetaCon& OptiNode::meta_con(
const MX& m)
const {
613 auto find = meta_con_.find(m.get());
617 MetaVar& OptiNode::meta(
const MX& m) {
619 auto find = meta_.find(m.get());
623 MetaCon& OptiNode::meta_con(
const MX& m) {
625 auto find = meta_con_.find(m.get());
637 bool OptiNode::has(
const MX& m)
const {
638 return meta_.find(m.
get())!=meta_.end();
641 bool OptiNode::has_con(
const MX& m)
const {
642 return meta_con_.find(m.get())!=meta_con_.end();
645 void OptiNode::assert_has(
const MX& m)
const {
648 casadi_assert(m.is_symbolic(),
"Symbol expected, got expression.");
649 if (parse_opti_name(m.name(), vt)) {
650 casadi_error(
"Unknown: " +
describe(m));
652 casadi_error(
"Unknown: " +
describe(m) +
"\n"
653 "Note: you cannot use a raw MX.sym in your Opti problem,"
654 " only if you package it in a CasADi Function.");
659 void OptiNode::assert_has_con(
const MX& m)
const {
660 casadi_assert(has_con(m),
"Constraint not present in Opti stack.");
663 casadi_int OptiNode::instance_count_ = 0;
665 bool OptiNode::parse_opti_name(
const std::string& name,
VariableType& vt)
const {
666 casadi_int i = name.find(
"opti");
667 if (i!=0)
return false;
671 if (i==std::string::npos)
return false;
672 if (name.substr(i, 1)==
"x") {
675 }
else if (name.substr(i, 1)==
"p") {
678 }
else if (name.substr(i, 5)==
"lam_g") {
686 std::string OptiNode::variable_type_to_string(
VariableType vt)
const {
687 auto it = VariableType2String_.find(vt);
688 if (it==VariableType2String_.end())
return "unknown variable type";
692 std::map<VariableType, std::string> OptiNode::VariableType2String_ =
699 for (
const auto& e :
symvar()) {
701 ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
708 for (
const auto& e :
symvar()) {
710 ret.push_back(e==store_latest_.at(meta(e).type)[meta(e).i]);
717 for (
const auto& e :
symvar()) {
719 ret.push_back(e==store_initial_.at(meta(e).type)[meta(e).i]);
725 casadi_assert(!f_.
is_empty() || !g_.empty(),
726 "You need to specify at least an objective (y calling 'minimize'), "
727 "or a constraint (by calling 'subject_to').");
729 symbol_active_.clear();
730 symbol_active_.resize(symbols_.size());
733 MX total_expr = vertcat(f_, veccat(g_));
736 for (
const auto& d :
symvar(total_expr))
737 symbol_active_[meta(d).
count] =
true;
742 casadi_int offset = 0;
743 for (
const auto& v :
x) {
744 meta(v).
start = offset;
746 meta(v).
stop = offset;
752 nlp_[
"x"] = veccat(
x);
753 nlp_[
"p"] = veccat(
p);
755 nlp_unscaled_[
"x"] = veccat(
x);
756 nlp_unscaled_[
"p"] = veccat(
p);
759 for (
const MX& e :
x) {
764 nlp_unscaled_[
"f"] = f_;
767 for (casadi_int i=0;i<g_.size();++i) {
770 symbol_active_[r2.
count] =
true;
782 index_all_to_g_.resize(offset);
784 casadi_int offset_g = 0;
785 for (
const auto&
g : g_) {
788 for (casadi_int i=0;i<r.
stop-r.
start;++i) {
789 index_all_to_g_[r.
start+i] = -1;
792 for (casadi_int i=0;i<r.
stop-r.
start;++i) {
793 index_all_to_g_[r.
start+i] = offset_g+i;
800 for (casadi_int i=0;i<lam.size();++i) meta(lam[i]).
active_i = i;
805 std::vector<MX> g_all, g_unscaled_all;
806 std::vector<MX> h_all, h_unscaled_all;
807 std::vector<MX> lbg_all, lbg_unscaled_all;
808 std::vector<MX> ubg_all, ubg_unscaled_all;
810 g_linear_scale_.clear();
811 h_linear_scale_.clear();
815 for (
const auto&
g : g_) {
817 h_all.push_back(meta_con(
g).canon/meta_con(
g).linear_scale);
818 if (meta_con(
g).canon.numel()==meta_con(
g).linear_scale.numel()) {
819 h_linear_scale.push_back(meta_con(
g).linear_scale);
821 casadi_assert_dev(meta_con(
g).linear_scale.numel()==1);
822 h_linear_scale.push_back(
DM::ones(meta_con(
g).canon.sparsity())*meta_con(
g).linear_scale);
824 h_unscaled_all.push_back(meta_con(
g).canon);
826 g_all.push_back(meta_con(
g).canon/meta_con(
g).linear_scale);
827 if (meta_con(
g).canon.numel()==meta_con(
g).linear_scale.numel()) {
830 casadi_assert_dev(meta_con(
g).linear_scale.numel()==1);
833 g_unscaled_all.push_back(meta_con(
g).canon);
834 lbg_all.push_back(meta_con(
g).lb/meta_con(
g).linear_scale);
835 lbg_unscaled_all.push_back(meta_con(
g).lb);
836 ubg_all.push_back(meta_con(
g).ub/meta_con(
g).linear_scale);
837 ubg_unscaled_all.push_back(meta_con(
g).ub);
839 equality_.insert(equality_.end(),
840 meta_con(
g).canon.numel(),
845 nlp_[
"g"] = veccat(g_all);
847 nlp_unscaled_[
"g"] = veccat(g_unscaled_all);
848 if (problem_type_==
"conic") {
849 nlp_[
"h"] = diagcat(h_all);
850 nlp_unscaled_[
"h"] = diagcat(h_unscaled_all);
851 h_linear_scale_ = veccat(h_linear_scale).nonzeros();
858 linear_scale_ = veccat(linear_scale).nonzeros();
859 linear_scale_offset_ = veccat(linear_scale_offset).nonzeros();
862 std::vector<MX> x_unscaled(
x.
size());
863 for (casadi_int i=0;i<
x.
size();++i) {
864 x_unscaled[i] =
x[i]*linear_scale[i] + linear_scale_offset[i];
868 std::vector<MX> expr = {nlp_[
"f"], nlp_[
"g"]};
869 if (problem_type_==
"conic") expr.push_back(nlp_[
"h"]);
870 std::vector<MX> fgh = substitute(expr,
x, x_unscaled);
873 if (problem_type_==
"conic") {
878 nlp_[
"f"] = nlp_[
"f"]/f_linear_scale_;
882 bounds[
"p"] = nlp_[
"p"];
883 bounds_lbg_ = veccat(lbg_all);
884 bounds_ubg_ = veccat(ubg_all);
885 bounds_unscaled_lbg_ = veccat(lbg_unscaled_all);
886 bounds_unscaled_ubg_ = veccat(ubg_unscaled_all);
888 bounds[
"lbg"] = bounds_lbg_;
889 bounds[
"ubg"] = bounds_ubg_;
891 bounds_ =
Function(
"bounds", bounds, {
"p"}, {
"lbg",
"ubg"});
897 std::string name = h.
name();
903 if (name==
"nlp_jac_g") {
908 args[
"x"] =
arg[
"x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
909 args[
"p"] =
arg[
"p"];
911 for (
const auto & it :
res) {
913 arg[it.first] = it.second*g_linear_scale_inv;
914 }
else if (it.first==
"jac_g_x") {
915 arg[it.first] = mtimes(
916 mtimes(diag(g_linear_scale_inv), it.second),
917 diag(x_linear_scale_mx));
919 casadi_error(
"Unknown output '" + it.first +
"'. Expecting g, jac_g_x.");
924 }
else if (name==
"nlp_hess_l") {
931 args[
"x"] =
arg[
"x"]*x_linear_scale_mx+x_linear_scale_offset_mx;
932 args[
"p"] =
arg[
"p"];
933 args[
"lam_f"] =
arg[
"lam_f"]/f_linear_scale_mx;
934 args[
"lam_g"] =
arg[
"lam_g"]/g_linear_scale_mx;
936 for (
const auto & it :
res) {
937 if (it.first==
"triu_hess_gamma_x_x" ||
938 it.first==
"hess_gamma_x_x" ||
939 (it.second.size1()==
nx() && it.second.is_square())) {
940 MX D = diag(x_linear_scale_mx);
941 arg[it.first] = mtimes(mtimes(
D, it.second),
D);
943 casadi_error(
"Unknown output '" + it.first +
"'. Expecting triu_hess_gamma_x_x");
949 casadi_error(
"Unknown helper function '" + name +
"'");
954 const Dict& solver_options) {
955 solver_name_ = solver_name;
956 solver_options_ = plugin_options;
957 if (!solver_options.empty())
958 solver_options_[solver_name] = solver_options;
962 std::vector<MX> OptiNode::sort(
const std::vector<MX>& v)
const {
966 std::map<casadi_int, MX> unordered;
967 for (
const auto& d : v)
968 unordered[meta(d).
count] = d;
972 for (
auto const &e : unordered)
973 ret.push_back(e.second);
985 std::vector<MX> OptiNode::ineq_unchain(
const MX& a,
bool& flipped) {
992 casadi_assert_dev(!left || !right);
995 return {a.
dep(0), a.
dep(1)};
999 std::vector<MX> ret = {a.
dep(!ineq)};
1002 casadi_assert_dev(!e.is_op(
OP_EQ));
1003 casadi_assert_dev(!e.dep(!ineq).is_op(
OP_LE) && !e.dep(!ineq).is_op(
OP_LT));
1004 ret.push_back(e.dep(!ineq));
1008 if (left) std::reverse(ret.begin(), ret.end());
1014 void OptiNode::assert_only_opti_symbols(
const MX& e)
const {
1016 for (
const auto& s : symbols) assert_has(s);
1019 void OptiNode::assert_only_opti_nondual(
const MX& e)
const {
1021 for (
const auto& s : symbols) {
1023 casadi_assert(meta(s).type!=
OPTI_DUAL_G,
"Dual variables forbidden in this context.");
1037 "Linear scale must have the same size as the expression. "
1038 "You got linear_scale " + con.
linear_scale.
dim() +
" while " + expr.
dim() +
" is expected.");
1042 std::vector<MX> ret;
1044 std::vector<MX> args = ineq_unchain(c, flipped);
1045 std::vector<bool> parametric;
1046 for (
auto &a : args) parametric.push_back(
is_parametric(a));
1048 if (args.size()==2 && (parametric[0] || parametric[1])) {
1050 MX e = args[0]-args[1];
1052 casadi_assert(!parametric[0] || !parametric[1],
1053 "Constraint must contain decision variables.");
1054 if (problem_type_==
"conic") {
1056 args[0] = -soc(args[0].dep(), args[1]);
1061 if (parametric[0]) {
1074 }
else if (args.size()==3 && parametric[0] && parametric[2]) {
1077 con.
lb = args[0]*
DM::ones(args[1].sparsity());
1078 con.
ub = args[2]*
DM::ones(args[1].sparsity());
1085 bool type_known =
false;
1086 for (casadi_int j=0;j<args.size()-1;++j) {
1087 MX e = args[j]-args[j+1];
1088 if (problem_type_==
"conic") {
1090 args[j] = -soc(args[j].dep(), args[j+1]);
1092 e = args[j]-args[j+1];
1109 "Matrix inequalities must be square. Did you mean element-wise inequality instead?");
1122 con.
canon = veccat(ret);
1130 con.
canon = diagcat(ret);
1136 "Constraint must contain decision variables.");
1143 "Constraint shape mismatch.");
1149 "Constraint shape mismatch.");
1167 "This action is forbidden since you have not solved the Opti stack yet "
1168 "(with calling 'solve').");
1173 "This action is forbidden since you have not baked the Opti stack yet "
1174 "(with calling 'solve').");
1178 casadi_assert_dev(g_.empty());
1183 assert_only_opti_nondual(
f);
1185 casadi_assert(
f.
is_scalar(),
"Objective must be scalar, got " +
f.
dim() +
".");
1187 f_linear_scale_ = linear_scale;
1191 assert_only_opti_nondual(
g);
1195 casadi_assert(!
g.
is_empty(),
"You passed an empty expression to `subject_to`. "
1196 "Make sure the number of rows and columns is non-zero. "
1197 "Got " +
g.
dim(
true) +
".");
1198 casadi_assert(
g.
nnz()>0,
"You passed a fully sparse expression to `subject_to`. "
1199 "Make sure the expression has at least one nonzero. "
1200 "Got " +
g.
dim(
true) +
".");
1201 casadi_assert(!
g.
is_constant(),
"You passed a constant to `subject_to`. "
1202 "You need a symbol to form a constraint.");
1206 register_dual(meta_con(
g));
1208 for (
auto && it : options) {
1209 if (it.first==
"stacktrace") {
1210 meta_con(
g).
extra[
"stacktrace"] = it.second.to_dict_vector();
1211 meta(meta_con(
g).dual_canon).
extra[
"stacktrace"] = it.second.to_dict_vector();
1212 }
else if (it.first==
"meta") {
1215 casadi_error(
"Unknown option: " + it.first);
1229 std::vector<MX> ret;
1230 for (
const auto& d :
symvar(expr)) {
1231 if (meta(d).type==type) ret.push_back(d);
1238 const std::vector<double> & x_v =
res.at(
"x").nonzeros();
1240 casadi_int i = meta(v).
i;
1241 std::vector<double> & data_v = store_latest_[
OPTI_VAR][i].nonzeros();
1242 for (casadi_int i=0;i<data_v.size();++i) {
1243 casadi_int j = meta(v).
start+i;
1244 data_v[i] = x_v[j]*linear_scale_[j] + linear_scale_offset_[j];
1247 if (
res.find(
"lam_g")!=
res.end()) {
1248 const std::vector<double> & lam_v =
res.at(
"lam_g").nonzeros();
1250 casadi_int i = meta(v).
i;
1251 std::vector<double> & data_v = store_latest_[
OPTI_DUAL_G][i].nonzeros();
1252 for (casadi_int i=0;i<data_v.size();++i) {
1253 casadi_int j = meta(v).
start+i;
1254 j = index_all_to_g_.at(j);
1256 data_v[i] = lam_v.at(j)/g_linear_scale_.at(j)*f_linear_scale_;
1264 bool OptiNode::old_callback()
const {
1265 if (callback_.
is_null())
return false;
1267 return !cb->associated_with(
this);
1276 for (
const auto&
g : g_) {
1277 if (problem_type_!=
"conic") {
1279 casadi_error(
"Psd constraints not implemented yet. "
1280 "Perhaps you intended an element-wise inequality? "
1281 "In that case, make sure that the matrix is flattened (e.g. mat(:)).");
1285 Dict solver_options_all = solver_options_;
1287 if (solver_options_all.find(
"equality")==solver_options_all.end()) {
1288 solver_options_all[
"equality"] = equality_;
1291 if (solver_options_all.find(
"discrete")==solver_options_all.end()) {
1292 solver_options_all[
"discrete"] = discrete_;
1295 Dict opts = solver_options_all;
1298 if (callback && user_callback_) {
1300 opts[
"iteration_callback"] = callback_;
1303 casadi_assert(!solver_name_.empty(),
1304 "You must call 'solver' on the Opti stack to select a solver. "
1305 "Suggestion: opti.solver('ipopt')");
1307 if (problem_type_==
"conic") {
1308 return qpsol(
"solver", solver_name_, nlp_, opts);
1310 return nlpsol(
"solver", solver_name_, nlp_, opts);
1322 bool solver_update =
solver_dirty() || old_callback() || (user_callback_ && callback_.
is_null());
1324 if (solver_update) {
1335 "Solver failed. You may use opti.debug.value to investigate the latest values of variables."
1336 " return_status is '" + ret +
"'");
1346 for (
const auto&
g : g_) {
1348 casadi_error(
"Constraint type unknown. Use ==, >= or <= .");
1351 if (user_callback_) {
1363 for (casadi_int i=0;i<s.size();++i) {
1365 "You have forgotten to assign a value to a parameter ('set_value'), "
1366 "or have set it to NaN/Inf:\n" +
describe(s[i], 1));
1373 arg[
"p"] = arg_[
"p"];
1375 arg_[
"lbg"] =
res[
"lbg"];
1376 arg_[
"ubg"] =
res[
"ubg"];
1382 return solver_(
arg);
1385 bool override_num(
const std::map<casadi_int, MX> & temp, std::vector<DM>& num, casadi_int i) {
1387 auto it = temp.find(i);
1388 if (it==temp.end()) {
1392 DM t =
static_cast<DM>(it->second);
1403 Function helper =
Function(
"helper", std::vector<MX>{veccat(
x), veccat(
p), veccat(lam)}, {expr});
1405 casadi_error(
"This expression has symbols that are not defined "
1406 "within Opti using variable/parameter.");
1408 std::map<VariableType, std::map<casadi_int, MX> > temp;
1410 for (
const auto& v : values) {
1411 casadi_assert_dev(v.is_op(
OP_EQ));
1412 casadi_int i = meta(v.dep(1)).
i;
1413 casadi_assert_dev(v.dep(0).is_constant());
1414 temp[meta(v.dep(1)).
type][i] = v.dep(0);
1417 bool undecided_vars =
false;
1418 std::vector<DM> x_num;
1419 for (
const auto& e :
x) {
1420 casadi_int i = meta(e).
i;
1421 x_num.push_back(store_latest_.at(
OPTI_VAR).at(i));
1424 x_num.back() = x_num.back()/store_linear_scale_.at(
OPTI_VAR)[meta(e).
i] -
1425 store_linear_scale_offset_.at(
OPTI_VAR)[meta(e).
i];
1429 std::vector<DM> lam_num;
1430 for (
const auto& e : lam) {
1431 casadi_int i = meta(e).
i;
1432 casadi_assert(i<store_latest_.at(
OPTI_DUAL_G).size(),
1433 "This expression has a dual for a constraint that is not given to Opti:\n" +
1435 lam_num.push_back(store_latest_.at(
OPTI_DUAL_G).at(i));
1439 std::vector<DM> p_num;
1440 for (
const auto& e :
p) {
1441 casadi_int i = meta(e).
i;
1442 p_num.push_back(store_initial_.at(
OPTI_PAR).at(i));
1444 casadi_assert(p_num.back().is_regular(),
1445 "This expression depends on a parameter with unset value:\n"+
1449 if (undecided_vars) {
1451 for (
const auto& e :
x)
1452 casadi_assert(symbol_active_[meta(e).count],
1453 "This expression has symbols that do not appear in the constraints and objective:\n" +
1455 for (
const auto& e : lam)
1456 casadi_assert(symbol_active_[meta(e).count],
1457 "This expression has a dual for a constraint that is not given to Opti:\n" +
1461 std::vector<DM>
arg = helper(std::vector<DM>{veccat(x_num), veccat(p_num), veccat(lam_num)});
1468 casadi_assert(symbol_active_[meta(m).count],
"Opti symbol is not used in Solver."
1469 " It does not make sense to assign a value to it:\n" +
describe(m, 1));
1473 for (
const auto& v : assignments) {
1474 casadi_assert_dev(v.is_op(
OP_EQ));
1475 casadi_assert_dev(v.dep(0).is_constant());
1484 casadi_assert(
x.
is_valid_input(),
"First argument to set_domain should be a variable.");
1486 if (domain==
"real") {
1488 }
else if (domain==
"integer") {
1491 casadi_error(
"Unknown domain '" + domain +
"'. Known values are 'real', 'integer'.");
1500 for (
const auto& v : assignments) {
1501 casadi_assert_dev(v.is_op(
OP_EQ));
1502 casadi_assert_dev(v.dep(0).is_constant());
1508 void OptiNode::set_value_internal(
const MX& x,
const DM& v,
1513 DM& target = store[meta(
x).
type][meta(
x).
i];
1521 MX symbols_cat = veccat(symbols);
1523 std::string failmessage =
"You cannot set initial/value of an arbitrary expression. "
1524 "Use symbols or simple mappings of symbols.";
1527 for (
bool b : which_depends(
x, symbols_cat, 2,
false)) casadi_assert(!b, failmessage);
1530 Dict opts = {{
"compact",
true}};
1531 Function Jf(
"Jf", std::vector<MX>{}, std::vector<MX>{jacobian(
x, veccat(symbols), opts)});
1532 DM J = Jf(std::vector<DM>{})[0];
1535 Function Ff(
"Ff", symbols, {
x});
1536 DM E = Ff(std::vector<DM>(symbols.size(), 0))[0];
1537 std::vector<double>& e = E.
nonzeros();
1545 std::vector<casadi_int> filled_rows = sum2(J).get_row();
1546 J = J(filled_rows,
all);
1549 std::vector<casadi_int> row, col;
1550 J.sparsity().get_triplet(row, col);
1551 const std::vector<double>& scaling = J.nonzeros();
1552 const std::vector<double>& data_original =
value.
nonzeros();
1554 std::vector<double> data; data.reserve(
value.
nnz());
1555 for (casadi_int i=0;i<
value.
nnz();++i) {
1556 double v = data_original[i];
1557 casadi_int nz = sp_JT.
colind()[i+1]-sp_JT.colind()[i];
1558 casadi_assert(nz<=1, failmessage);
1562 casadi_assert(v==e[i],
"In initial/value assignment: "
1563 "inconsistent numerical values. At nonzero " +
str(i) +
", lhs has "
1564 +
str(e[i]) +
", while rhs has " +
str(v) +
".");
1569 std::vector<double> temp(symbols_cat.nnz(),
casadi::nan);
1570 for (casadi_int k=0;k<data.size();++k) {
1571 double& lhs = temp[col[k]];
1572 double rhs = data[row[k]]/scaling[row[k]];
1573 if (std::isnan(lhs)) {
1577 casadi_assert(lhs==rhs,
"Initial/value assignment with mapping is ambiguous.");
1581 casadi_int offset = 0;
1582 for (
const auto & s : symbols) {
1583 DM& target = store[meta(s).
type][meta(s).
i];
1584 std::vector<double>& data = target.
nonzeros();
1586 for (casadi_int i=0;i<s.nnz();++i) {
1588 double v = temp[offset+i];
1589 if (!std::isnan(v)) data[i] = v;
1598 casadi_assert(meta(s).type!=
OPTI_PAR,
1599 "You cannot set an initial value for a parameter. Did you mean 'set_value'?");
1600 set_value_internal(
x, v, store_initial_);
1605 casadi_assert(meta(s).type!=
OPTI_VAR,
1606 "You cannot set a value for a variable. Did you mean 'set_initial'?");
1607 set_value_internal(
x, v, store_initial_);
1612 casadi_assert(meta(s).type!=
OPTI_PAR,
1613 "You cannot set a scale value for a parameter.");
1615 "Dimension mismatch in linear_scale. Expected " +
x.
dim() +
", got " + scale.
dim()+
".");
1616 set_value_internal(
x, scale, store_linear_scale_);
1618 "Dimension mismatch in linear_scale offset. Expected " +
x.
dim() +
1619 ", got " + scale.
dim()+
".");
1620 set_value_internal(
x, offset, store_linear_scale_offset_);
1624 if (symbol_active_.empty())
return std::vector<MX>{};
1625 std::vector<MX> ret;
1626 for (
const auto& s : symbols_) {
1627 if (symbol_active_[meta(s).count] && meta(s).type==type)
1638 const std::map<
VariableType, std::vector<DM> >& store)
const {
1639 if (symbol_active_.empty())
return std::vector<DM>{};
1640 std::vector<DM> ret;
1641 for (
const auto& s : symbols_) {
1642 if (symbol_active_[meta(s).count] && meta(s).type==type) {
1643 ret.push_back(store.at(meta(s).type)[meta(s).i]);
1650 const std::vector<MX>& args,
const std::vector<MX>& res,
1651 const std::vector<std::string>& name_in,
1652 const std::vector<std::string>& name_out,
1659 std::vector<MX> x0,
p,
lam_g;
1665 for (
const auto& a : args) {
1666 casadi_assert(a.
is_valid_input(),
"Argument " +
str(k) +
" is not purely symbolic.");
1669 if (!symbol_active_[meta(prim).count])
continue;
1670 casadi_int i = meta(prim).
active_i;
1673 }
else if (meta(prim).type==
OPTI_PAR) {
1678 casadi_error(
"Unknown");
1683 arg[
"p"] = veccat(
p);
1687 arg[
"x0"] = veccat(x0);
1689 arg[
"lbg"] = r[
"lbg"];
1690 arg[
"ubg"] = r[
"ubg"];
1699 std::vector<MX> arg_in = helper(std::vector<MX>{r.at(
"x"),
arg[
"p"], r.at(
"lam_g")});
1701 return Function(name, args, arg_in, name_in, name_out, opts);
1710 return instance_number_;
1717 std::vector<double> g_scaled_ =
value(nlp_.at(
"g"), std::vector<MX>(),
true).
get_elements();
1718 std::vector<double> lbg_scaled_ =
value(bounds_lbg_, std::vector<MX>(),
true).
get_elements();
1719 std::vector<double> ubg_scaled_ =
value(bounds_ubg_, std::vector<MX>(),
true).
get_elements();
1723 uout() <<
"Violated constraints (tol " << tol <<
"), in order of declaration:" << std::endl;
1725 for (casadi_int i=0;i<g_.size();++i) {
1726 double err = std::max(g_[i]-ubg_[i], lbg_[i]-g_[i]);
1727 double err_scaled = std::max(g_scaled_[i]-ubg_scaled_[i], lbg_scaled_[i]-g_scaled_[i]);
1730 uout() <<
"/" << g_.size();
1733 if (is_simple_[i]) {
1735 uout() <<
" reduced to bound on x[" << g_index_reduce_x_.at(i) <<
"]";
1737 uout() <<
" reduced to bound on x(" << g_index_reduce_x_.at(i)+1 <<
")";
1740 uout() <<
" reduced to g[" << g_index_reduce_g_.at(i) <<
"]";
1744 uout() <<
" ------ " << std::endl;
1745 uout() << lbg_[i] <<
" <= " << g_[i] <<
" <= " << ubg_[i];
1746 uout() <<
" (viol " << err <<
")" << std::endl;
1747 if (g_[i]!=g_scaled_[i]) {
1748 uout() << lbg_scaled_[i] <<
" <= " << g_scaled_[i] <<
" <= " << ubg_scaled_[i];
1749 uout() <<
" (scaled) (viol " << err_scaled <<
")" << std::endl;
FunctionInternal(const std::string &name)
Constructor.
FunctionInternal * get() const
const std::vector< std::string > & name_in() const
Get input scheme.
const std::string & name() const
Name of the function.
static Function create(FunctionInternal *node)
Create from node.
bool has_free() const
Does the function have free variables.
Dict stats(int mem=0) const
Get all statistics obtained at the end of the last evaluate call.
const std::vector< std::string > & name_out() const
Get output scheme.
casadi_int numel() const
Get the number of elements.
bool is_dense() const
Check if the matrix expression is dense.
bool is_empty(bool both=false) const
Check if the sparsity is empty, i.e. if one of the dimensions is zero.
std::pair< casadi_int, casadi_int > size() const
Get the shape.
bool is_vector() const
Check if the matrix is a row or column vector.
casadi_int nnz() const
Get the number of (structural) non-zero elements.
casadi_int size2() const
Get the second dimension (i.e. number of columns)
casadi_int size1() const
Get the first dimension (i.e. number of rows)
std::string dim(bool with_nz=false) const
Get string representation of dimensions.
static MatType ones(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries one.
static MX sym(const std::string &name, casadi_int nrow=1, casadi_int ncol=1)
Create an nrow-by-ncol symbolic primitive.
const casadi_int * colind() const
Get the sparsity pattern. See the Sparsity class for details.
static MatType zeros(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries zero.
bool is_scalar(bool scalar_and_dense=false) const
Check if the matrix expression is scalar.
bool is_null() const
Is a null pointer?
static casadi_int start_index
bool is_valid_input() const
Check if matrix can be used to define function inputs.
const Sparsity & sparsity() const
Get the sparsity pattern.
std::string name() const
Get the name.
static std::vector< MX > symvar(const MX &x)
bool is_constant() const
Check if constant.
static MX eye(casadi_int n)
Identity matrix.
MXNode * get() const
Get a const pointer to the node.
bool is_op(casadi_int op) const
Is it a certain operation.
std::vector< MX > primitives() const
Get primitives.
MX dep(casadi_int ch=0) const
Get the nth dependency as MX.
bool is_symbolic() const
Check if symbolic.
std::vector< Scalar > & nonzeros()
Matrix< Scalar > T() const
Transpose the matrix.
const Sparsity & sparsity() const
Const access the sparsity - reference to data member.
void set(const Matrix< Scalar > &m, bool ind1, const Slice &rr)
static Matrix< double > nan(const Sparsity &sp)
create a matrix with all nan
bool is_regular() const
Checks if expression does not contain NaN or Inf.
std::vector< Scalar > get_elements() const
Get all elements.
MX x_lookup(casadi_index i) const
std::string describe(const MX &x, casadi_index indent=0, const Dict &opts=Dict()) const
MX g_lookup(casadi_index i) const
std::string x_describe(casadi_index i, const Dict &opts=Dict()) const
std::string g_describe(casadi_index i, const Dict &opts=Dict()) const
A simplified interface for NLP modeling/solving.
Function solver_construct(bool callback=true)
Dict user_dict(const MX &m) const
casadi_int g_index_reduce_x(casadi_int i) const
std::vector< MX > value_variables() const
get assignment expressions for latest values
std::vector< MX > initial() const
get assignment expressions for initial values
std::vector< MX > active_symvar(VariableType type) const
std::string describe(const MX &x, casadi_int indent=0, const Dict &opts=Dict()) const
MX g() const
Get all (scalarised) constraint expressions as a column vector.
MetaCon get_meta_con(const MX &m) const
Get meta-data of symbol (for internal use only)
OptiAdvanced baked_copy() const
DM g_linear_scale() const
MX dual(const MX &m) const
get the dual variable
std::vector< DM > active_values(VariableType type) const
MX x() const
Get all (scalarised) decision variables as a symbolic column vector.
void minimize(const MX &f, double linear_scale=1)
Set objective.
casadi_int nx() const
Number of (scalarised) decision variables.
MX variable(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a decision variable (symbol)
std::string x_describe(casadi_int i, const Dict &opts=Dict()) const
void bake()
Fix the structure of the optimization problem.
casadi_int instance_number() const
DM value(const MX &x, const std::vector< MX > &values=std::vector< MX >(), bool scaled=false) const
void disp(std::ostream &stream, bool more=false) const override
Print representation.
void set_domain(const MX &x, const std::string &domain)
Set domain of variable.
OptiSol solve(bool accept_limit)
Crunch the numbers; solve the problem.
bool solver_dirty() const
casadi_int np() const
Number of (scalarised) parameters.
bool has_callback_class() const
MX g_lookup(casadi_int i) const
casadi_int ng() const
Number of (scalarised) constraints.
MetaVar get_meta(const MX &m) const
Get meta-data of symbol (for internal use only)
void subject_to()
Clear constraints.
void assert_baked() const
std::vector< MX > symvar() const
void update_user_dict(const MX &m, const Dict &meta)
Add meta-data to an expression.
DM x_linear_scale() const
void assert_solved() const
void set_meta(const MX &m, const MetaVar &meta)
Set meta-data of an expression.
MX parameter(casadi_int n=1, casadi_int m=1, const std::string &attribute="full")
Create a parameter (symbol); fixed during optimization.
bool is_parametric(const MX &expr) const
Return true if the expression depends only on Opti parameters, not on variables.
void assert_active_symbol(const MX &m) const
bool return_success(bool accept_limit) const
Did the solver return successfully?
std::vector< MX > value_parameters() const
bool problem_dirty() const
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const std::vector< std::string > &name_in, const std::vector< std::string > &name_out, const Dict &opts)
Create a CasADi Function from the Opti solver.
DM x_linear_scale_offset() const
MX x_lookup(casadi_int i) const
casadi_int g_index_reduce_g(casadi_int i) const
void mark_problem_dirty(bool flag=true)
OptiNode(const std::string &problem_type)
Create Opti Context.
std::string return_status() const
Get return status of solver.
void assert_empty() const
void set_initial(const MX &x, const DM &v)
void mark_solver_dirty(bool flag=true)
MX p() const
Get all (scalarised) parameters as a symbolic column vector.
casadi_int g_index_unreduce_g(casadi_int i) const
void set_value(const MX &x, const DM &v)
Set value of parameter.
double f_linear_scale() const
MX lam_g() const
Get dual variables as a symbolic column vector.
void set_linear_scale(const MX &x, const DM &scale, const DM &offset)
Set scale of a decision variable.
std::string g_describe(casadi_int i, const Dict &opts=Dict()) const
friend class InternalOptiCallback
void solver(const std::string &solver, const Dict &plugin_options=Dict(), const Dict &solver_options=Dict())
Solver.
MX f() const
Get objective expression.
void mark_solved(bool flag=true)
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Function casadi_solver() const
Get the underlying CasADi solver of the Opti stack.
static OptiNode * create(const std::string &problem_type)
MetaCon canon_expr(const MX &expr, const DM &linear_scale=1) const
Interpret an expression (for internal use only)
void show_infeasibilities(double tol=0, const Dict &opts=Dict()) const
DMDict solve_actual(const DMDict &args)
Dict stats() const
Get statistics.
void set_meta_con(const MX &m, const MetaCon &meta)
Set meta-data of an expression.
A simplified interface for NLP modeling/solving.
A simplified interface for NLP modeling/solving.
Function scale_helper(const Function &h) const
Scale a helper function constructed via opti.x, opti.g, ...
Function to_function(const std::string &name, const std::vector< MX > &args, const std::vector< MX > &res, const Dict &opts=Dict())
Create a CasADi Function from the Opti solver.
static Opti create(OptiNode *node)
void clear_mem()
Clear all memory (called from destructor)
Class representing a Slice.
casadi_int size1() const
Get the number of rows.
static Sparsity dense(casadi_int nrow, casadi_int ncol=1)
Create a dense rectangular sparsity pattern.
casadi_int nnz() const
Get the number of (structural) non-zeros.
casadi_int size2() const
Get the number of columns.
static Sparsity lower(casadi_int n)
Create a lower triangular square sparsity pattern.
Function qpsol(const std::string &name, const std::string &solver, const SXDict &qp, const Dict &opts)
Function nlpsol(const std::string &name, const std::string &solver, const SXDict &nlp, const Dict &opts)
casadi_int nlpsol_n_out()
Number of NLP solver outputs.
std::vector< std::string > nlpsol_out()
Get NLP solver output scheme of NLP solvers.
std::map< std::string, MX > MXDict
bool override_num(const std::map< casadi_int, MX > &temp, std::vector< DM > &num, casadi_int i)
T get_from_dict(const std::map< std::string, T > &d, const std::string &key, const T &default_value)
std::string description(Category v)
double if_else_zero(double x, double y)
Conditional assignment.
void assign_vector(const std::vector< S > &s, std::vector< D > &d)
double sign(double x)
Sign function, note that sign(nan) == nan.
std::vector< casadi_int > find(const std::vector< T > &v)
Find the indices of the nonzero elements.
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
bool any(const std::vector< bool > &v)
Check if any arguments are true.
@ OPTI_GENERIC_INEQUALITY
const double nan
Not a number.
bool all(const std::vector< bool > &v)
Check if all arguments are true.
bool is_regular(const std::vector< T > &v)
Checks if array does not contain NaN or Inf.
std::map< std::string, DM > DMDict
std::string filename(const std::string &path)