26 #include "feasiblesqpmethod.hpp"
28 #include "casadi/core/casadi_misc.hpp"
29 #include "casadi/core/calculus.hpp"
30 #include "casadi/core/conic.hpp"
31 #include "casadi/core/conic_impl.hpp"
32 #include "casadi/core/convexify.hpp"
int CASADI_NLPSOL_FEASIBLESQPMETHOD_EXPORT
casadi_register_nlpsol_feasiblesqpmethod(Nlpsol::Plugin* plugin) {
  plugin->name = "feasiblesqpmethod";
  plugin->version = CASADI_VERSION;
73 "The solver type: Either SQP or SLP. Defaults to SQP"}},
76 "The QP solver to be used by the SQP method [qpoases]"}},
79 "Options to be passed to the QP solver"}},
80 {
"hessian_approximation",
82 "limited-memory|exact"}},
85 "Maximum number of SQP iterations"}},
88 "Minimum number of SQP iterations"}},
91 "Stopping criterion for primal infeasibility"}},
94 "Stopping criterion for dual infeasability"}},
97 "Size of memory to store history of merit function values"}},
100 "Size of L-BFGS memory."}},
103 "Print the header with problem statistics"}},
106 "Print the iterations"}},
109 "Print a status message after solving"}},
112 "Function for calculating the objective function (autogenerated by default)"}},
115 "Function for calculating the constraints (autogenerated by default)"}},
118 "Function for calculating the gradient of the objective (autogenerated by default)"}},
121 "Function for calculating the Jacobian of the constraints (autogenerated by default)"}},
124 "Function for calculating the Hessian of the Lagrangian (autogenerated by default)"}},
125 {
"convexify_strategy",
127 "NONE|regularize|eigen-reflect|eigen-clip. "
128 "Strategy to convexify the Lagrange Hessian before passing it to the solver."}},
131 "When using a convexification strategy, make sure that "
132 "the smallest eigenvalue4 is at least this (default: 1e-7)."}},
135 "Maximum number of iterations to compute an eigenvalue decomposition (default: 50)."}},
138 "Initialize the QP subproblems with a feasible initial value (default: false)."}},
141 "Optimality tolerance. Below this value an iterate is considered to be optimal."}},
144 "Feasibility tolerance. Below this tolerance an iterate is considered to be feasible."}},
147 "Initial trust-region radius."}},
150 "Lower eta in trust-region acceptance criterion."}},
153 "Upper eta in trust-region acceptance criterion."}},
156 "Lower alpha in trust-region size criterion."}},
159 "Upper alpha in trust-region size criterion."}},
162 "Trust-region tolerance. "
163 "Below this value another scalar is equal to the trust region radius."}},
166 "Is the trust-region ratio above this value, the step is accepted."}},
169 "Minimum trust-region radius."}},
172 "Maximum trust-region radius."}},
175 "Vector that tells where trust-region is applied."}},
176 {
"contraction_acceptance_value",
178 "If the empirical contraction rate in the feasibility iterations "
179 "is above this value in the heuristics the iterations are aborted."}},
182 "Number of watchdog iterations in feasibility iterations. "
183 "After this amount of iterations, it is checked with the contraction acceptance value, "
184 "if iterations are converging."}},
187 "Maximum number of inner iterations."}},
190 "Use Anderson Acceleration. (default false)"}},
193 "Anderson memory. If Anderson is used default is 1, else default is 0."}},
  std::string hessian_approximation = "exact";
  std::string solve_type = "SQP";
  std::string qpsol_plugin = "qpoases";
  Dict qpsol_options;
  std::string convexify_strategy = "none";
  double convexify_margin = 1e-7;
  casadi_int max_iter_eig = 200;
  for (auto&& op : opts) {
    if (op.first=="max_iter") {
      max_iter_ = op.second;
    } else if (op.first=="min_iter") {
      min_iter_ = op.second;
    } else if (op.first=="use_anderson") {
      use_anderson_ = op.second;
    } else if (op.first=="anderson_memory") {
      sz_anderson_memory_ = op.second;
    } else if (op.first=="lbfgs_memory") {
      lbfgs_memory_ = op.second;
    } else if (op.first=="tol_pr") {
      tol_pr_ = op.second;
    } else if (op.first=="tol_du") {
      tol_du_ = op.second;
    } else if (op.first=="hessian_approximation") {
      hessian_approximation = op.second.to_string();
    } else if (op.first=="solve_type") {
      solve_type = op.second.to_string();
    } else if (op.first=="qpsol") {
      qpsol_plugin = op.second.to_string();
    } else if (op.first=="qpsol_options") {
      qpsol_options = op.second;
    } else if (op.first=="print_header") {
      print_header_ = op.second;
    } else if (op.first=="print_iteration") {
      print_iteration_ = op.second;
    } else if (op.first=="print_status") {
      print_status_ = op.second;
    } else if (op.first=="hess_lag") {
      Function f = op.second.to_function();
      casadi_assert_dev(f.n_in()==4);
      casadi_assert_dev(f.n_out()==1);
    } else if (op.first=="jac_g") {
      Function f = op.second.to_function();
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==1);
    } else if (op.first=="grad_f") {
      Function f = op.second.to_function();
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==1);
    } else if (op.first=="f") {
      Function f = op.second.to_function();
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==1);
    } else if (op.first=="g") {
      Function f = op.second.to_function();
      casadi_assert_dev(f.n_in()==2);
      casadi_assert_dev(f.n_out()==1);
    } else if (op.first=="convexify_strategy") {
      convexify_strategy = op.second.to_string();
    } else if (op.first=="convexify_margin") {
      convexify_margin = op.second;
    } else if (op.first=="max_iter_eig") {
      max_iter_eig = op.second;
    } else if (op.first=="init_feasible") {
      init_feasible_ = op.second;
    } else if (op.first == "optim_tol") {
      optim_tol_ = op.second;
    } else if (op.first == "feas_tol") {
      feas_tol_ = op.second;
    } else if (op.first == "tr_rad0") {
      tr_rad0_ = op.second;
    } else if (op.first == "tr_eta1") {
      tr_eta1_ = op.second;
    } else if (op.first == "tr_eta2") {
      tr_eta2_ = op.second;
    } else if (op.first == "tr_alpha1") {
      tr_alpha1_ = op.second;
    } else if (op.first == "tr_alpha2") {
      tr_alpha2_ = op.second;
    } else if (op.first == "tr_tol") {
      tr_tol_ = op.second;
    } else if (op.first == "tr_acceptance") {
      tr_acceptance_ = op.second;
    } else if (op.first == "tr_rad_min") {
      tr_rad_min_ = op.second;
    } else if (op.first == "tr_rad_max") {
      tr_rad_max_ = op.second;
    } else if (op.first == "tr_scale_vector") {
      tr_scale_vector_ = op.second;
    } else if (op.first == "contraction_acceptance_value") {
      contraction_acceptance_value_ = op.second;
    } else if (op.first == "watchdog") {
      watchdog_ = op.second;
    } else if (op.first == "max_inner_iter") {
      max_inner_iter_ = op.second;
  uout() << "Solve type: " << solve_type << std::endl;
                    {"hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
  uout() << "Sparsity pattern: " << Hsp_ << std::endl;
  if (convexify_strategy!="none") {
    convexify_ = true;
    Dict opts;
    opts["strategy"] = convexify_strategy;
    opts["margin"] = convexify_margin;
    opts["max_iter_eig"] = max_iter_eig;
  casadi_assert(!qpsol_plugin.empty(), "'qpsol' option has not been set");
  uout() << "Sparsity pattern: " << Hsp_ << std::endl;
  uout() << "Sparsity pattern: " << Asp_ << std::endl;
  print("-------------------------------------------\n");
  print("This is casadi::Feasiblesqpmethod.\n");
  if (exact_hessian_) {
    print("Using exact Hessian\n");
  } else {
    print("Using limited memory BFGS Hessian approximation\n");
  }
  print("Number of variables:                       %9d\n", nx_);
  print("Number of constraints:                     %9d\n", ng_);
  print("Number of nonzeros in constraint Jacobian: %9d\n", Asp_.nnz());
  print("Number of nonzeros in Lagrangian Hessian:  %9d\n", Hsp_.nnz());

  set_feasiblesqpmethod_prob();
void Feasiblesqpmethod::set_feasiblesqpmethod_prob() {

void Feasiblesqpmethod::set_work(void* mem, const double**& arg, double**& res,
                                 casadi_int*& iw, double*& w) const {
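// Ratio of actual to predicted reduction: val_f - val_f_corr is the actual
// objective decrease after the feasibility correction, and -val_m_k is the
// decrease predicted by the quadratic model m_k = gf'dx + 0.5*dx'Bk*dx.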
double Feasiblesqpmethod::eval_tr_ratio(double val_f, double val_f_corr, double val_m_k) const {
  return (val_f - val_f_corr) / (-val_m_k);
}
    tr_rad = tr_alpha1_ * casadi_masked_norm_inf(nx_, d->dx, d->tr_mask);
  } else if (tr_ratio > tr_eta2_ &&
      abs(casadi_masked_norm_inf(nx_, d->dx, d->tr_mask) - tr_rad) < optim_tol_) {
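    // Trust-region update rule: shrink the radius by tr_alpha1_ when the
    // ratio falls below tr_eta1_; enlarge it (by tr_alpha2_, per the options
    // above) when the ratio exceeds tr_eta2_ and the step was limited by the
    // trust region, i.e. the masked step norm equals the radius up to
    // optim_tol_.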
  auto d_nlp = &m->d_nlp;
  if (tr_ratio > tr_acceptance_) {
    d_nlp->objective = d->f_feas;
    uout() << "ACCEPTED" << std::endl;
  } else {
    uout() << "REJECTED" << std::endl;
    print("This is not implemented yet!!!");
    casadi_dense_lsqr_solve(d->z_tmp, d->dx_feas, 1, 1, curr_stage, nx_, d->gamma);
    casadi_axpy(curr_stage*nx_, -1.0, d->anderson_memory_iterate, d->z_tmp);
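    // Anderson acceleration sketch: a small dense least-squares problem over
    // the stored iterate/step history yields the mixing coefficients gamma;
    // the next iterate is then the gamma-weighted combination of the entries
    // in the Anderson memory.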
  auto d_nlp = &m->d_nlp;
  double step_inf_norm = casadi_masked_norm_inf(nx_, d->dx, d->tr_mask);
  double prev_step_inf_norm = step_inf_norm;
  m->arg[0] = d->z_feas;
  m->arg[1] = d_nlp->p;
  m->res[0] = d->z_feas + nx_;
  if (calc_function(m, "nlp_g")) {
    uout() << "What does it mean that calc_function fails here??" << std::endl;
  }
  double curr_infeas = prev_infeas;
  double kappa_watchdog = 0.0;
  double acc_as_exac = 0.0;
  double watchdog_prev_inf_norm = prev_step_inf_norm;
    } else if (j>0 && (curr_infeas > 1.0 || as_exac > 1.0)) {
    casadi_clip_min(d->lbdz_feas, nx_, -tr_rad, d->tr_mask);
    casadi_vector_fmax(nx_, d->z_tmp, d->lbdz_feas, d->lbdz_feas);
    casadi_clip_max(d->ubdz_feas, nx_, tr_rad, d->tr_mask);
    casadi_vector_fmin(nx_, d->z_tmp, d->ubdz_feas, d->ubdz_feas);
      solve_QP(m, d->Bk, d->gf_feas, d->lbdz_feas, d->ubdz_feas,
               d->Jk, d->dx_feas, d->dlam_feas, 0);
      solve_LP(m, d->gf_feas, d->lbdz_feas, d->ubdz_feas,
               d->Jk, d->dx_feas, d->dlam_feas, 0);
    step_inf_norm = casadi_masked_norm_inf(nx_, d->dx_feas, d->tr_mask);
    m->arg[0] = d->z_feas;
    m->arg[1] = d_nlp->p;
    m->res[0] = d->z_feas + nx_;
    if (calc_function(m, "nlp_g")) {
      uout() << "What does it mean that calc_function fails here??" << std::endl;
    }
    curr_infeas = prev_infeas;
    kappa = step_inf_norm/prev_step_inf_norm;
      print("%6s %9.10f %14s %9.10f %20s %9.10f\n", "Kappa:", kappa,
            "Infeasibility:", curr_infeas, "AsymptoticExactness:", as_exac);
    acc_as_exac += as_exac;
      kappa_watchdog = step_inf_norm / watchdog_prev_inf_norm;
      watchdog_prev_inf_norm = step_inf_norm;
      print("Kappa watchdog: %9.10f\n", kappa_watchdog);
      if (curr_infeas < feas_tol_ && as_exac < 0.5) {
    prev_step_inf_norm = step_inf_norm;
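    // Watchdog heuristic: every watchdog_ inner iterations the contraction
    // rate kappa_watchdog = step_inf_norm / watchdog_prev_inf_norm is
    // measured; if the iterates have stopped contracting (compared against
    // contraction_acceptance_value_), the feasibility loop is aborted and
    // the outer loop shrinks the trust region instead.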
  auto d_nlp = &m->d_nlp;
  int step_accepted = 0;
  double tr_ratio = 0.0;
  for (casadi_int i=0; i<nx_; ++i) {
    d->tr_mask[i] = d->tr_scale_vector[i]!=0;
  }
  const double one = 1.;
  std::string info = "";
  if (m->iter_count == 0) {
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    m->res[0] = &d_nlp->objective;
    if (calc_function(m, "nlp_f")) {
      uout() << "What does it mean that calc_function fails here??" << std::endl;
    }
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    m->res[0] = d_nlp->z + nx_;
    if (calc_function(m, "nlp_g")) {
      uout() << "What does it mean that calc_function fails here??" << std::endl;
    }
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    if (calc_function(m, "nlp_grad_f")) {
      uout() << "What does it mean that calc_function fails here??" << std::endl;
    }
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    if (calc_function(m, "nlp_jac_g")) {
      m->return_status = "Non_Regular_Sensitivities";
      print("MESSAGE(feasiblesqpmethod): "
            "No regularity of sensitivities at current point.\n");
    }
    m->arg[0] = d_nlp->z;
    m->arg[1] = d_nlp->p;
    m->arg[3] = d_nlp->lam + nx_;
  } else if (m->iter_count==0) {
    casadi_bfgs_reset(Hsp_, d->Bk);
  } else {
    casadi_bfgs(Hsp_, d->Bk, d->dx, d->gLag, d->gLag_old, m->w);
  }
    print("MESSAGE(feasiblesqpmethod): No feasible initialization given! "
          "Find feasible initialization.\n");
    m->return_status = "No_Feasible_Initialization";
else if (step_accepted == 0) {
1101 m->arg[0] = d_nlp->z;
1102 m->arg[1] = d_nlp->p;
1105 uout() <<
"What does it mean that calc_function fails here??" << std::endl;
1108 m->arg[0] = d_nlp->z;
1109 m->arg[1] = d_nlp->p;
1113 m->return_status =
"Non_Regular_Sensitivities";
1116 print(
"MESSAGE(feasiblesqpmethod): "
1117 "No regularity of sensitivities at current point.\n");
1131 m->arg[0] = d_nlp->z;
1132 m->arg[1] = d_nlp->p;
1134 m->arg[3] = d_nlp->lam +
nx_;
  } else if (m->iter_count==0) {
    casadi_bfgs_reset(Hsp_, d->Bk);
  } else {
    casadi_bfgs(Hsp_, d->Bk, d->dx, d->gLag, d->gLag_old, m->w);
                      pr_inf, du_inf, dx_norminf, m->reg, tr_rad_prev, info);
    tr_rad_prev = tr_rad;
      m->return_status = "User_Requested_Stop";
      print("MESSAGE(feasiblesqpmethod): Maximum number of iterations reached.\n");
      m->return_status = "Maximum_Iterations_Exceeded";
    casadi_clip_min(d->lbdz, nx_, -tr_rad, d->tr_mask);
    casadi_clip_max(d->ubdz, nx_, tr_rad, d->tr_mask);
      ret = solve_QP(m, d->Bk, d->gf, d->lbdz, d->ubdz, d->Jk,
                     d->dx, d->dlam, 0);
      ret = solve_LP(m, d->gf, d->lbdz, d->ubdz, d->Jk,
                     d->dx, d->dlam, 0);
      print("MESSAGE(feasiblesqpmethod): "
            "Optimal Point Found? Quadratic model is zero. "
            "After %d iterations\n", m->iter_count-1);
      m->return_status = "Solve_Succeeded";
    if (print_status_) print("WARNING(feasiblesqpmethod): Indefinite Hessian detected\n");
      uout() << "Rejected inner iterates" << std::endl;
      tr_rad = 0.5 * casadi_masked_norm_inf(nx_, d->dx, d->tr_mask);
    m->arg[0] = d->z_feas;
    m->arg[1] = d_nlp->p;
    m->res[0] = &d->f_feas;
    if (calc_function(m, "nlp_f")) {
      uout() << "What does it mean that calc_function fails here??" << std::endl;
    }
1296 "Trust-region radius smaller than feasibility!! "
1298 m->return_status =
"Trust_Region_Radius_Becomes_Too_Small";
    print("%4s %9s %14s %9s %9s %9s %9s %7s %5s %7s\n",
          "iter", "m_k", "objective", "tr_ratio", "inf_pr",
          "inf_du", "||d||", "lg(rg)", "tr_rad", "info");
void Feasiblesqpmethod::print_iteration(casadi_int iter, double obj,
                                        double m_k, double tr_ratio,
                                        double pr_inf, double du_inf,
                                        double dx_norm, double rg,
                                        double tr_rad,
                                        std::string info) const {
  print("%4d %9.2e %14.6e %9.2e %9.2e %9.2e %9.2e ",
        iter, m_k, obj, tr_ratio, pr_inf, du_inf, dx_norm);
    print("%7.2f ", log10(rg));
    print("%9.5e", tr_rad);
int Feasiblesqpmethod::solve_QP(FeasiblesqpmethodMemory* m, const double* H,
                                const double* g, const double* lbdz,
                                const double* ubdz, const double* A,
                                double* x_opt, double* dlam, int mode) const {
int Feasiblesqpmethod::solve_LP(FeasiblesqpmethodMemory* m, const double* g,
                                const double* lbdz, const double* ubdz,
                                const double* A,
                                double* x_opt, double* dlam, int mode) const {
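  // solve_QP and solve_LP wrap the same conic subproblem solver: with
  // solve_type == "SQP" the subproblem carries the (convexified) Lagrangian
  // Hessian, while with "SLP" the Hessian is dropped and the subproblem
  // degenerates to an LP. The mode flag and qpsol_ela_ are, per the header,
  // reserved for elastic-mode subproblems.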
  g.local("m_p", "const casadi_real", "*");
  g.local("m_f", "casadi_real");
  g.local("m_f_feas", "casadi_real");
1441 "-casadi_inf",
false);
1443 "casadi_inf",
false);
1444 casadi_assert(
exact_hessian_,
"Codegen implemented for exact Hessian only.",
false);
  g.local("d", "struct casadi_feasiblesqpmethod_data");
  g.local("p", "struct casadi_feasiblesqpmethod_prob");
  g << "d.prob = &p;\n";
  g << "p.nlp = &p_nlp;\n";
  g.local("m_w", "casadi_real", "*");
  g.local("m_iw", "casadi_int", "*");
  g << "m_iw = iw;\n";
  g.local("m_arg", "const casadi_real", "**");
  g.local("m_res", "casadi_real", "**");
  g.local("ret", "casadi_int");
  g.local("iter_count", "casadi_int");
  g.local("step_accepted", "casadi_int");
  g.local("m_k", "casadi_real");
  g.local("tr_ratio", "casadi_real");
  g.local("tr_rad", "casadi_real");
  g << "tr_rad = " << tr_rad0_ << ";\n";
  g.local("tr_rad_prev", "casadi_real");
  g << "tr_rad_prev = " << tr_rad0_ << ";\n";
  g << "for (casadi_int i = 0; i < " << nx_ << "; ++i) {\n";
  g << "d.tr_mask[i] = d.tr_scale_vector[i] != 0;\n";
  g.local("one", "const casadi_real");
  g.comment("MAIN OPTIMIZATION LOOP");
  g << "while (1) {\n";
  g << "if (iter_count == 0) {\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = &m_f;\n";
  std::string nlp_f = g(get_function("nlp_f"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_f + ") return 10;\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = d_nlp.z+" + str(nx_) + ";\n";
  std::string nlp_g = g(get_function("nlp_g"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_g + ") return 20;\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = d.gf;\n";
  std::string nlp_grad_f = g(get_function("nlp_grad_f"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_grad_f + ") return 30;\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = d.Jk;\n";
  std::string nlp_jac_g = g(get_function("nlp_jac_g"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_jac_g + ") return 40;\n";
  g.comment("Just exact Hessian implemented, GN would be possible!");
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_arg[2] = &one;\n";
  g << "m_arg[3] = d_nlp.lam+" + str(nx_) + ";\n";
  g << "m_res[0] = d.Bk;\n";
  std::string nlp_hess_l = g(get_function("nlp_hess_l"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_hess_l + ") return 70;\n";
  std::string viol = g.max_viol(nx_+ng_, "d_nlp.z", "d_nlp.lbz", "d_nlp.ubz");
  g << "if (" << viol << " > " << feas_tol_ << ") {\n";
  g << "printf(\"MESSAGE(feasiblesqpmethod): "
       "No feasible initialization given! Find feasible initialization.\\n\");\n";
  g << "} else if (step_accepted == 0) {\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = d.gf;\n";
  nlp_grad_f = g(get_function("nlp_grad_f"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_grad_f + ") return 50;\n";
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = d.Jk;\n";
  nlp_jac_g = g(get_function("nlp_jac_g"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_jac_g + ") return 60;\n";
  g.comment("Just exact Hessian implemented, GN would be possible!");
  g << "m_arg[0] = d_nlp.z;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_arg[2] = &one;\n";
  g << "m_arg[3] = d_nlp.lam+" + str(nx_) + ";\n";
  g << "m_res[0] = d.Bk;\n";
  nlp_hess_l = g(get_function("nlp_hess_l"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_hess_l + ") return 70;\n";
  g.comment("Evaluate the gradient of the Lagrangian");
  g << g.copy("d.gf", nx_, "d.gLag") << "\n";
  g << g.mv("d.Jk", Asp_, "d_nlp.lam+"+str(nx_), "d.gLag", true) << "\n";
  g << g.axpy(nx_, "1.0", "d_nlp.lam", "d.gLag") << "\n";
  g.comment("Primal infeasibility");
  g.local("pr_inf", "casadi_real");
  g << "pr_inf = " << g.max_viol(nx_+ng_, "d_nlp.z", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
  g.comment("inf-norm of Lagrange gradient");
  g.local("du_inf", "casadi_real");
  g << "du_inf = " << g.norm_inf(nx_, "d.gLag") << ";\n";
  g.comment("inf-norm of step");
  g.local("dx_norminf", "casadi_real");
  g << "dx_norminf = " << g.norm_inf(nx_, "d.dx") << ";\n";
  g << "printf(\"%4s %9s %14s %9s %9s %9s %9s %5s\\n\", "
       "\"iter\", \"m_k\", \"objective\", \"tr_ratio\", "
       "\"inf_pr\", \"inf_du\", \"||d||\", \"tr_rad\");\n";
  g << "printf(\"%4lld %9.2e %14.6e %9.2e %9.2e %9.2e %9.2e %5.2e\\n\", "
       "iter_count, m_k, m_f, tr_ratio, pr_inf, du_inf, dx_norminf, tr_rad_prev);\n";
  g << "tr_rad_prev = tr_rad;\n";
  g << "if (iter_count >= " << max_iter_ << ") {\n";
  g << g.printf("MESSAGE(feasiblesqpmethod): "
                "Maximum number of iterations reached.\\n") << "\n";
  g.comment("Formulate the QP");
  g.comment("Define the lower bounds");
  g << g.copy("d_nlp.lbz", nx_+ng_, "d.lbdz") << "\n";
  g << g.axpy(nx_+ng_, "-1.0", "d_nlp.z", "d.lbdz") << "\n";
  g << g.clip_min("d.lbdz", nx_, "-tr_rad", "d.tr_mask") << "\n";
  g.comment("Define the upper bounds");
  g << g.copy("d_nlp.ubz", nx_+ng_, "d.ubdz") << "\n";
  g << g.axpy(nx_+ng_, "-1.0", "d_nlp.z", "d.ubdz") << "\n";
  g << g.clip_max("d.ubdz", nx_, "tr_rad", "d.tr_mask") << "\n";
  g << g.copy("d_nlp.lam", nx_+ng_, "d.dlam") << "\n";
  g.comment("Increase counter");
  g << "++iter_count;\n";
  codegen_qp_solve(g, "d.Bk", "d.gf", "d.lbdz", "d.ubdz", "d.Jk", "d.dx", "d.dlam", 0);
  g.comment("Eval quadratic model and check for convergence");
  g.comment("Checking convergence criteria");
  g << "if (fabs(m_k) < " << optim_tol_ << ") {\n";
  g << "printf(\"MESSAGE(feasiblesqpmethod): Optimal Point Found? "
       "Quadratic model is zero. After %lld iterations.\\n\", iter_count-1);\n";
  g.comment("Detecting indefiniteness");
  g.comment("Do the feasibility iterations here");
  g << "if (ret < 0) {\n";
  g << "printf(\"Rejected inner iterates\\n\");\n";
  g << "m_arg[0] = d.z_feas;\n";
  g << "m_arg[1] = m_p;\n";
  g << "m_res[0] = &m_f_feas;\n";
  nlp_f = g(get_function("nlp_f"), "m_arg", "m_res", "m_iw", "m_w");
  g << "if (" + nlp_f + ") return 1;\n";
  g << "if (tr_rad < " << feas_tol_ << ") {\n";
  g << "printf(\"MESSAGE: Trust-Region radius smaller than feasibility!!\\n\");\n";
  g.comment("Close the step acceptance loop");
  g.comment("Close the optimization problem loop");
void Feasiblesqpmethod::codegen_qp_solve(CodeGenerator& cg,
                                         const std::string& H, const std::string& g,
                                         const std::string& lbdz, const std::string& ubdz,
                                         const std::string& A, const std::string& x_opt,
                                         const std::string& dlam, int mode) const {
  for (casadi_int i=0; i<qpsol_.n_in(); ++i) cg << "m_arg[" << i << "] = 0;\n";
  cg << "m_arg[" << CONIC_H << "] = " << H << ";\n";
  cg << "m_arg[" << CONIC_G << "] = " << g << ";\n";
  cg << "m_arg[" << CONIC_X0 << "] = " << x_opt << ";\n";
  cg << "m_arg[" << CONIC_LAM_X0 << "] = " << dlam << ";\n";
  cg << "m_arg[" << CONIC_LAM_A0 << "] = " << dlam << "+" << nx_ << ";\n";
  cg << "m_arg[" << CONIC_LBX << "] = " << lbdz << ";\n";
  cg << "m_arg[" << CONIC_UBX << "] = " << ubdz << ";\n";
  cg << "m_arg[" << CONIC_A << "] = " << A << ";\n";
  cg << "m_arg[" << CONIC_LBA << "] = " << lbdz << "+" << nx_ << ";\n";
  cg << "m_arg[" << CONIC_UBA << "] = " << ubdz << "+" << nx_ << ";\n";
  for (casadi_int i=0; i<qpsol_.n_out(); ++i) cg << "m_res[" << i << "] = 0;\n";
  cg << "m_res[" << CONIC_X << "] = " << x_opt << ";\n";
  cg << "m_res[" << CONIC_LAM_X << "] = " << dlam << ";\n";
  cg << "m_res[" << CONIC_LAM_A << "] = " << dlam << "+" << nx_ << ";\n";
  std::string flag = cg(qpsol_, "m_arg", "m_res", "m_iw", "m_w");
  cg << "ret = " << flag << ";\n";
  cg << "if (ret == -1000) return -1000;\n";
void Feasiblesqpmethod::codegen_tr_update(CodeGenerator& cg,
    const std::string& tr_rad, const std::string& tr_ratio) const {
  cg << "if (tr_ratio < " << tr_eta1_ << ") {\n";
  std::string tol = "fabs(" + cg.masked_norm_inf(nx_, "d.dx", "d.tr_mask") + " - tr_rad)";
  cg << "} else if (tr_ratio > " << tr_eta2_ << " && " << tol << " < "
     << optim_tol_ << ") {\n";
  cg.comment("else: keep trust-region as it is....");
  cg << "m_k = 0.5*" << cg.bilin("d.Bk", Hsp_, "d.dx", "d.dx")
     << "+" << cg.dot(nx_, "d.gf", "d.dx") << ";\n";
void Feasiblesqpmethod::codegen_eval_tr_ratio(CodeGenerator& cg,
    const std::string& val_f, const std::string& val_f_corr,
    const std::string& val_m_k) const {
  cg << "tr_ratio = (" + val_f + "-" + val_f_corr + ") / (-" + val_m_k + ");\n";
void Feasiblesqpmethod::codegen_step_update(CodeGenerator& cg,
    const std::string& tr_ratio) const {
  cg << cg.copy("d.z_feas", nx_ + ng_, "d_nlp.z") << "\n";
  cg << "m_f = m_f_feas;\n";
  cg << cg.copy("d.dlam_feas", nx_ + ng_, "d_nlp.lam") << "\n";
  cg << "printf(\"ACCEPTED\\n\");\n";
  cg << "printf(\"REJECTED\\n\");\n";
  cg << "ret = -1;\n";
void Feasiblesqpmethod::codegen_feasibility_iterations(CodeGenerator& cg,
    const std::string& tr_rad) const {
  cg << cg.copy("d.dx", nx_, "d.dx_feas") << "\n";
  cg << cg.copy("d.dlam", nx_, "d.dlam_feas") << "\n";
  cg << cg.copy("d.dlam", nx_+ng_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_+ng_, "-1.0", "d_nlp.lam", "d.z_tmp") << "\n";
  cg.local("step_inf_norm", "casadi_real");
  cg.local("prev_step_inf_norm", "casadi_real");
  cg << "prev_step_inf_norm = step_inf_norm;\n";
  cg << cg.copy("d_nlp.z", nx_+ng_, "d.z_feas") << "\n";
  cg << cg.axpy(nx_, "1.0", "d.dx_feas", "d.z_feas") << "\n";
  cg << "m_arg[0] = d.z_feas;\n";
  cg << "m_arg[1] = m_p;\n";
  cg << "m_res[0] = d.z_feas+" + str(nx_) + ";\n";
  std::string nlp_g = cg(get_function("nlp_g"), "m_arg", "m_res", "m_iw", "m_w");
  cg << "if (" + nlp_g + ") return 100;\n";
  cg.local("inner_iter", "casadi_int");
  cg.local("prev_infeas", "casadi_real");
  cg << "prev_infeas = " << cg.max_viol(nx_+ng_, "d.z_feas", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
  cg.local("curr_infeas", "casadi_real");
  cg << "curr_infeas = prev_infeas;\n";
  cg << cg.copy("d.dx", nx_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "1.0", "d_nlp.z", "d.z_tmp") << "\n";
  cg.local("as_exac", "casadi_real");
  cg << "as_exac = " << cg.norm_2(nx_, "d.z_tmp") << "/" << cg.norm_2(nx_, "d.dx") << ";\n";
  cg.local("kappa_watchdog", "casadi_real");
  cg.local("kappa", "casadi_real");
  cg.local("acc_as_exac", "casadi_real");
  cg << "acc_as_exac = 0.0;\n";
  cg.local("watchdog_prev_inf_norm", "casadi_real");
  cg << "watchdog_prev_inf_norm = prev_step_inf_norm;\n";
  cg << "if (curr_infeas < " << feas_tol_ << ") {\n";
  cg << "inner_iter = j;\n";
  cg << "if (as_exac < 0.5) {\n";
  cg << "ret = 0;\n";
  cg << "ret = -1;\n";
  cg << "} else if (j>0 && (curr_infeas > 1.0 || as_exac > 1.0)) {\n";
  cg << "ret = -1;\n";
  cg << "inner_iter = j+1;\n";
  cg << cg.copy("d.z_feas", nx_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d_nlp.z", "d.z_tmp") << "\n";
  cg << cg.copy("d.gf", nx_, "d.gf_feas") << "\n";
  cg.comment("Just SQP implemented so far!");
  cg << cg.mv("d.Bk", Hsp_, "d.z_tmp", "d.gf_feas", true) << "\n";
  cg << cg.copy("d_nlp.lbz", nx_, "d.lbdz_feas") << "\n";
  cg << cg.clip_min("d.lbdz_feas", nx_, "-tr_rad", "d.tr_mask") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.lbdz_feas") << "\n";
  cg << cg.axpy(nx_, "1.0", "d_nlp.z", "d.lbdz_feas") << "\n";
  cg << cg.copy("d_nlp.lbz", nx_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.z_tmp") << "\n";
  cg << cg.vector_fmax(nx_, "d.z_tmp", "d.lbdz_feas", "d.lbdz_feas");
  cg << cg.copy("d_nlp.ubz", nx_, "d.ubdz_feas") << "\n";
  cg << cg.clip_max("d.ubdz_feas", nx_, "tr_rad", "d.tr_mask") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.ubdz_feas") << "\n";
  cg << cg.axpy(nx_, "1.0", "d_nlp.z", "d.ubdz_feas") << "\n";
  cg << cg.copy("d_nlp.ubz", nx_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.z_tmp") << "\n";
  cg << cg.vector_fmin(nx_, "d.z_tmp", "d.ubdz_feas", "d.ubdz_feas");
  cg.comment("Just SQP implemented. Solve the feasible QP");
  codegen_qp_solve(cg, "d.Bk", "d.gf_feas", "d.lbdz_feas", "d.ubdz_feas",
                   "d.Jk", "d.dx_feas", "d.dlam_feas", 0);
  cg << "step_inf_norm = " << cg.masked_norm_inf(nx_, "d.dx_feas", "d.tr_mask") << ";\n";
  cg.comment("No Anderson Acceleration implemented yet.");
  cg << cg.axpy(nx_, "1.0", "d.dx_feas", "d.z_feas") << "\n";
  cg << "m_arg[0] = d.z_feas;\n";
  cg << "m_arg[1] = m_p;\n";
  cg << "m_res[0] = d.z_feas+" + str(nx_) + ";\n";
  nlp_g = cg(get_function("nlp_g"), "m_arg", "m_res", "m_iw", "m_w");
  cg << "if (" + nlp_g + ") return 100;\n";
  cg << "prev_infeas = " << cg.max_viol(nx_+ng_, "d.z_feas", "d_nlp.lbz", "d_nlp.ubz") << ";\n";
  cg << "curr_infeas = prev_infeas;\n";
  cg << "kappa = step_inf_norm/prev_step_inf_norm;\n";
  cg << cg.copy("d.dx", nx_, "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "-1.0", "d.z_feas", "d.z_tmp") << "\n";
  cg << cg.axpy(nx_, "1.0", "d_nlp.z", "d.z_tmp") << "\n";
  cg.local("as_exac", "casadi_real");
  cg << "as_exac = " << cg.norm_2(nx_, "d.z_tmp") << "/" << cg.norm_2(nx_, "d.dx") << ";\n";
  cg << "printf(\"Kappa: %9.10f, Infeasibility: %9.10f, "
        "AsymptoticExactness: %9.10f\\n\", kappa, curr_infeas, as_exac);\n";
  cg << "acc_as_exac += as_exac;\n";
  cg << "if (inner_iter % " << watchdog_ << " == 0) {\n";
  cg << "kappa_watchdog = step_inf_norm / watchdog_prev_inf_norm;\n";
  cg << "watchdog_prev_inf_norm = step_inf_norm;\n";
  cg << "printf(\"Kappa watchdog: %9.10f\\n\", kappa_watchdog);\n";
  cg << "if (curr_infeas < " << feas_tol_ << " && as_exac < 0.5) {\n";
     << "acc_as_exac/" << watchdog_ << " > 0.5) {\n";
  cg << "ret = -1;\n";
  cg << "acc_as_exac = 0.0;\n";
  cg << "prev_step_inf_norm = step_inf_norm;\n";
  cg << "ret = -1;\n";
  stats["iter_count"] = m->iter_count;
  int version = s.version("Feasiblesqpmethod", 1, 3);
  s.unpack("Feasiblesqpmethod::Hrsp", Hrsp);
  double convexify_margin;
  s.unpack("Feasiblesqpmethod::convexify_margin", convexify_margin);
  char convexify_strategy;
  s.unpack("Feasiblesqpmethod::convexify_strategy", convexify_strategy);
  casadi_assert(convexify_strategy==0, "deserialization failed.");
  s.unpack("Feasiblesqpmethod::Hsp_project", Hsp_project);
  s.unpack("Feasiblesqpmethod::scc_transform", scc_transform);
  std::vector<casadi_int> scc_offset;
  s.unpack("Feasiblesqpmethod::scc_offset", scc_offset);
  std::vector<casadi_int> scc_mapping;
  s.unpack("Feasiblesqpmethod::scc_mapping", scc_mapping);
  casadi_int max_iter_eig;
  s.unpack("Feasiblesqpmethod::max_iter_eig", max_iter_eig);
  casadi_int block_size;
  s.unpack("Feasiblesqpmethod::block_size", block_size);
  s.unpack("Feasiblesqpmethod::scc_sp", scc_sp);
  set_feasiblesqpmethod_prob();
  s.version("Feasiblesqpmethod", 3);
  s.pack("Feasiblesqpmethod::Hsp", Hsp_);
  s.pack("Feasiblesqpmethod::Asp", Asp_);
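// Serialization note: serialize_body writes format version 3, while the
// deserializing constructor above accepts versions 1 through 3 and discards
// the legacy convexify-related fields (Hrsp, Hsp_project, scc_*, block_size)
// that older formats carried.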