27 #include "bonmin_interface.hpp"
28 #include "bonmin_nlp.hpp"
29 #include "casadi/core/casadi_misc.hpp"
30 #include "../../core/global_options.hpp"
31 #include "../../core/casadi_interrupt.hpp"
41 int CASADI_NLPSOL_BONMIN_EXPORT
44 plugin->name =
"bonmin";
46 plugin->version = CASADI_VERSION;
67 {{
"pass_nonlinear_variables",
69 "Pass list of variables entering nonlinearly to BONMIN"}},
70 {
"pass_nonlinear_constraints",
72 "Pass list of constraints entering nonlinearly to BONMIN"}},
75 "Options to be passed to BONMIN"}},
78 "String metadata (a dictionary with lists of strings) "
79 "about variables to be passed to BONMIN"}},
82 "Integer metadata (a dictionary with lists of integers) "
83 "about variables to be passed to BONMIN"}},
86 "Numeric metadata (a dictionary with lists of reals) about "
87 "variables to be passed to BONMIN"}},
90 "String metadata (a dictionary with lists of strings) about "
91 "constraints to be passed to BONMIN"}},
94 "Integer metadata (a dictionary with lists of integers) "
95 "about constraints to be passed to BONMIN"}},
98 "Numeric metadata (a dictionary with lists of reals) about "
99 "constraints to be passed to BONMIN"}},
102 "Function for calculating the Hessian of the Lagrangian (autogenerated by default)"}},
105 "Options for the autogenerated Hessian of the Lagrangian."}},
108 "Function for calculating the Jacobian of the constraints "
109 "(autogenerated by default)"}},
112 "Options for the autogenerated Jacobian of the constraints."}},
115 "Function for calculating the gradient of the objective "
116 "(column, autogenerated by default)"}},
119 "Options for the autogenerated gradient of the objective."}},
122 "Options for the autogenerated gradient of the objective."}},
125 "Options for the autogenerated gradient of the objective."}},
128 "Options for the autogenerated gradient of the objective."}},
139 Dict hess_lag_options, jac_g_options, grad_f_options;
141 std::vector< std::vector<int> > sos1_groups;
142 std::vector< std::vector<double> > sos1_weights;
144 for (
auto&& op : opts) {
145 if (op.first==
"bonmin") {
147 }
else if (op.first==
"pass_nonlinear_variables") {
149 }
else if (op.first==
"pass_nonlinear_constraints") {
151 }
else if (op.first==
"var_string_md") {
153 }
else if (op.first==
"var_integer_md") {
155 }
else if (op.first==
"var_numeric_md") {
157 }
else if (op.first==
"con_string_md") {
159 }
else if (op.first==
"con_integer_md") {
161 }
else if (op.first==
"con_numeric_md") {
163 }
else if (op.first==
"hess_lag_options") {
164 hess_lag_options = op.second;
165 }
else if (op.first==
"jac_g_options") {
166 jac_g_options = op.second;
167 }
else if (op.first==
"grad_f_options") {
168 grad_f_options = op.second;
169 }
else if (op.first==
"hess_lag") {
171 casadi_assert_dev(f.
n_in()==4);
172 casadi_assert_dev(f.
n_out()==1);
174 }
else if (op.first==
"jac_g") {
176 casadi_assert_dev(f.
n_in()==2);
177 casadi_assert_dev(f.
n_out()==2);
179 }
else if (op.first==
"grad_f") {
181 casadi_assert_dev(f.
n_in()==2);
182 casadi_assert_dev(f.
n_out()==2);
184 }
else if (op.first==
"sos1_groups") {
185 sos1_groups =
to_int(op.second.to_int_vector_vector());
186 for (
auto & g : sos1_groups) {
189 }
else if (op.first==
"sos1_weights") {
190 sos1_weights = op.second.to_double_vector_vector();
191 }
else if (op.first==
"sos1_priorities") {
198 auto hessian_approximation =
opts_.find(
"hessian_approximation");
199 if (hessian_approximation!=
opts_.end()) {
223 {
"triu:hess:gamma:x:x"}, {{
"gamma", {
"f",
"g"}}});
229 for (casadi_int i=0;i<
nx_;++i)
nl_ex_[i] = col[i+1]-col[i];
245 casadi_assert(sos1_weights.empty() || sos1_weights.size()==
sos_num_,
246 "sos1_weights has incorrect size");
248 "sos1_priorities has incorrect size");
252 for (casadi_int i=0;i<
sos_num_;++i) {
254 const std::vector<int>& sos1_group = sos1_groups[i];
257 std::vector<double> default_weights(sos1_group.size(), 1.0);
258 const std::vector<double>& sos1_weight =
259 sos1_weights.empty() ? default_weights : sos1_weights[i];
260 casadi_assert(sos1_weight.size()==sos1_group.size(),
261 "sos1_weights has incorrect size");
290 m->sos_info.types =
new char[
sos_num_];
291 m->sos_info.priorities =
new int[
sos_num_];
292 m->sos_info.starts =
new int[
sos_num_ + 1];
310 casadi_int*& iw,
double*& w)
const {
318 m->grad_fk = w; w +=
nx_;
327 case Bonmin::TMINLP::MINLP_ERROR:
328 return "MINLP_ERROR";
329 case Bonmin::TMINLP::SUCCESS:
331 case Bonmin::TMINLP::INFEASIBLE:
333 case Bonmin::TMINLP::CONTINUOUS_UNBOUNDED:
334 return "CONTINUOUS_UNBOUNDED";
335 case Bonmin::TMINLP::LIMIT_EXCEEDED:
336 return "LIMIT_EXCEEDED";
337 case Bonmin::TMINLP::USER_INTERRUPT:
338 return "USER_INTERRUPT";
343 inline std::string
to_str(
const CoinError& e) {
344 std::stringstream ss;
345 if (e.lineNumber()<0) {
346 ss << e.message()<<
" in "<< e.className()<<
"::" << e.methodName();
348 ss << e.fileName() <<
":" << e.lineNumber() <<
" method " << e.methodName()
349 <<
" : assertion \'" << e.message() <<
"\' failed.";
350 if (!e.className().empty())
351 ss <<
"Possible reason: "<< e.className();
356 inline std::string
to_str(TNLPSolver::UnsolvedError& e) {
357 std::stringstream ss;
368 class BonMinMessageHandler :
public CoinMessageHandler {
370 BonMinMessageHandler() { }
372 int print()
override {
373 uout() << messageBuffer_ << std::endl;
376 ~BonMinMessageHandler()
override { }
377 BonMinMessageHandler(
const BonMinMessageHandler &other): CoinMessageHandler(other) {}
378 BonMinMessageHandler(
const CoinMessageHandler &other): CoinMessageHandler(other) {}
379 BonMinMessageHandler & operator=(
const BonMinMessageHandler &rhs) {
380 CoinMessageHandler::operator=(rhs);
383 CoinMessageHandler* clone()
const override {
384 return new BonMinMessageHandler(*
this);
390 auto d_nlp = &m->
d_nlp;
397 m->regularization_size.clear();
401 m->ls_trials.clear();
409 BonMinMessageHandler mh;
412 BonminSetup bonmin(&mh);
414 SmartPtr<OptionsList> options =
new OptionsList();
415 SmartPtr<Journalist> journalist=
new Journalist();
416 SmartPtr<Bonmin::RegisteredOptions> roptions =
new Bonmin::RegisteredOptions();
420 StreamJournal* jrnl_raw =
new StreamJournal(
"console", J_ITERSUMMARY);
422 jrnl_raw->SetPrintLevel(J_DBG, J_NONE);
423 SmartPtr<Journal> jrnl = jrnl_raw;
424 journalist->AddJournal(jrnl);
427 options->SetJournalist(journalist);
428 options->SetRegisteredOptions(roptions);
429 bonmin.setOptionsAndJournalist(roptions, options, journalist);
430 bonmin.registerOptions();
432 auto regops = bonmin.roptions()->RegisteredOptionsList();
435 for (
auto&& op :
opts_) {
437 auto regops_it = regops.find(op.first);
438 if (regops_it==regops.end()) {
439 casadi_error(
"No such BONMIN option: " + op.first);
443 Ipopt::RegisteredOptionType ipopt_type = regops_it->second->Type();
447 switch (ipopt_type) {
448 case Ipopt::OT_Number:
449 ret = bonmin.options()->SetNumericValue(op.first, op.second.to_double(),
false);
451 case Ipopt::OT_Integer:
452 ret = bonmin.options()->SetIntegerValue(op.first, op.second.to_int(),
false);
454 case Ipopt::OT_String:
455 ret = bonmin.options()->SetStringValue(op.first, op.second.to_string(),
false);
457 case Ipopt::OT_Unknown:
459 casadi_warning(
"Cannot handle option \"" + op.first +
"\", ignored");
462 if (!ret) casadi_error(
"Invalid options were detected by BONMIN.");
466 bonmin.initialize(GetRawPtr(tminlp));
472 }
catch (CoinError& e) {
473 casadi_error(
"CoinError occured: " +
to_str(e));
474 }
catch (TNLPSolver::UnsolvedError& e) {
475 casadi_error(
"TNLPSolver::UnsolvedError occured" +
to_str(e));
477 casadi_error(
"Uncaught error in Bonmin");
487 const double* g,
const double* lambda,
double obj_value,
int iter,
488 double inf_pr,
double inf_du,
double mu,
double d_norm,
489 double regularization_size,
double alpha_du,
double alpha_pr,
490 int ls_trials,
bool full_callback)
const {
491 auto d_nlp = &m->
d_nlp;
494 if (
verbose_) casadi_message(
"intermediate_callback started");
495 m->
inf_pr.push_back(inf_pr);
496 m->
inf_du.push_back(inf_du);
498 m->
d_norm.push_back(d_norm);
503 m->
obj.push_back(obj_value);
508 for (casadi_int i=0; i<
nx_; ++i) {
509 d_nlp->lam[i] = z_U[i]-z_L[i];
516 <<
"Warning: intermediate_callback is disfunctional in your installation. "
517 "You will only be able to use stats(). "
518 "See https://github.com/casadi/casadi/wiki/enableBonminCallback to enable it."
539 m->
res[0] = &ret_double;
542 int ret =
static_cast<int>(ret_double);
544 m->
fstats.at(
"callback_fun").toc();
551 }
catch(std::exception& ex) {
552 casadi_warning(
"intermediate_callback: " + std::string(ex.
what()));
560 const double* x,
double obj_value)
const {
561 auto d_nlp = &m->
d_nlp;
567 d_nlp->objective = obj_value;
580 m->
success = status==Bonmin::TMINLP::SUCCESS;
582 }
catch(std::exception& ex) {
583 uerr() <<
"finalize_solution failed: " << ex.what() << std::endl;
593 double* g_l,
double* g_u)
const {
594 auto d_nlp = &m->
d_nlp;
601 }
catch(std::exception& ex) {
602 uerr() <<
"get_bounds_info failed: " << ex.what() << std::endl;
609 bool init_z,
double* z_L,
double* z_U,
610 bool init_lambda,
double* lambda)
const {
611 auto d_nlp = &m->
d_nlp;
620 for (casadi_int i=0; i<
nx_; ++i) {
621 z_L[i] = std::max(0., -d_nlp->lam[i]);
622 z_U[i] = std::max(0., d_nlp->lam[i]);
632 }
catch(std::exception& ex) {
633 uerr() <<
"get_starting_point failed: " << ex.what() << std::endl;
639 int& nnz_jac_g,
int& nnz_h_lag)
const {
653 }
catch(std::exception& ex) {
654 uerr() <<
"get_nlp_info failed: " << ex.what() << std::endl;
666 for (
auto&& i :
nl_ex_)
if (i) nv++;
669 }
catch(std::exception& ex) {
670 uerr() <<
"get_number_of_nonlinear_variables failed: " << ex.what() << std::endl;
678 for (casadi_int i=0; i<
nl_ex_.size(); ++i) {
679 if (
nl_ex_[i]) *pos_nonlin_vars++ = i;
682 }
catch(std::exception& ex) {
683 uerr() <<
"get_list_of_nonlinear_variables failed: " << ex.what() << std::endl;
699 stats[
"iter_count"] = m->iter_count;
704 s.
version(
"BonminInterface", 1);
733 s.
version(
"BonminInterface", 1);
BonminInterface(const std::string &name, const Function &nlp)
std::vector< int > sos1_starts_
std::vector< char > sos1_types_
int init_mem(void *mem) const override
Initialize memory block.
std::vector< double > sos1_weights_
Sos constraints information.
int solve(void *mem) const override
bool exact_hessian_
Exact Hessian?
Dict opts_
All BONMIN options.
std::vector< bool > nl_g_
bool pass_nonlinear_variables_
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
static Nlpsol * creator(const std::string &name, const Function &nlp)
Create a new NLP Solver.
bool get_list_of_nonlinear_variables(int num_nonlin_vars, int *pos_nonlin_vars) const
void finalize_solution(BonminMemory *m, Bonmin::TMINLP::SolverReturn status, const double *x, double obj_value) const
static const Options options_
Options.
Dict get_stats(void *mem) const override
Get all statistics.
void init(const Dict &opts) override
Initialize.
const Bonmin::TMINLP::SosInfo & sosConstraints(BonminMemory *m) const
std::vector< int > sos1_indices_
bool get_starting_point(BonminMemory *m, bool init_x, double *x, bool init_z, double *z_L, double *z_U, bool init_lambda, double *lambda) const
int get_number_of_nonlinear_variables() const
std::vector< bool > nl_ex_
std::vector< int > sos1_priorities_
bool pass_nonlinear_constraints_
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
friend class BonminUserClass
~BonminInterface() override
void get_nlp_info(BonminMemory *m, int &nx, int &ng, int &nnz_jac_g, int &nnz_h_lag) const
bool intermediate_callback(BonminMemory *m, const double *x, const double *z_L, const double *z_U, const double *g, const double *lambda, double obj_value, int iter, double inf_pr, double inf_du, double mu, double d_norm, double regularization_size, double alpha_du, double alpha_pr, int ls_trials, bool full_callback) const
bool get_bounds_info(BonminMemory *m, double *x_l, double *x_u, double *g_l, double *g_u) const
static const std::string meta_doc
A documentation string.
const char * what() const override
Display error.
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
std::vector< bool > which_depends(const std::string &s_in, const std::vector< std::string > &s_out, casadi_int order=1, bool tr=false) const
Which variables enter with some order.
casadi_int n_out() const
Get the number of function outputs.
casadi_int n_in() const
Get the number of function inputs.
bool is_null() const
Is a null pointer?
static casadi_int start_index
NLP solver storage class.
bool iteration_callback_ignore_errors_
Options.
Dict get_stats(void *mem) const override
Get all statistics.
static const Options options_
Options.
void init(const Dict &opts) override
Initialize.
casadi_int ng_
Number of constraints.
int init_mem(void *mem) const override
Initialize memory block.
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
casadi_int nx_
Number of variables.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Function fcallback_
callback function, executed at each iteration
void set_function(const Function &fcn, const std::string &fname, bool jit=false)
Function oracle_
Oracle: Used to generate other functions.
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
std::vector< std::string > get_function() const override
Get list of dependency functions.
bool has_function(const std::string &fname) const override
static void registerPlugin(const Plugin &plugin, bool needs_lock=true)
Register an integrator in the factory.
bool verbose_
Verbose printout.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
casadi_int nnz() const
Get the number of (structural) non-zeros.
const casadi_int * colind() const
Get a reference to the colindex of all column element (see class description)
@ NLPSOL_G
Constraints function at the optimal solution (ng x 1)
@ NLPSOL_X
Decision variables at the optimal solution (nx x 1)
@ NLPSOL_LAM_P
Lagrange multipliers for bounds on P at the solution (np x 1)
@ NLPSOL_F
Cost function value at the optimal solution (1 x 1)
@ NLPSOL_LAM_G
Lagrange multipliers for bounds on G at the solution (ng x 1)
@ NLPSOL_LAM_X
Lagrange multipliers for bounds on X at the solution (nx x 1)
std::string to_str(const CoinError &e)
int to_int(casadi_int rhs)
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <-x.
void casadi_fill(T1 *x, casadi_int n, T1 alpha)
FILL: x <- alpha.
const char * return_status_string(Bonmin::TMINLP::SolverReturn status)
int CASADI_NLPSOL_BONMIN_EXPORT casadi_register_nlpsol_bonmin(Nlpsol::Plugin *plugin)
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
const double nan
Not a number.
void CASADI_NLPSOL_BONMIN_EXPORT casadi_load_nlpsol_bonmin()
std::vector< double > obj
std::vector< double > inf_pr
~BonminMemory()
Destructor.
std::vector< double > alpha_du
const char * return_status
std::vector< double > d_norm
std::vector< double > alpha_pr
Bonmin::TMINLP::SosInfo sos_info
BonminMemory()
Constructor.
std::vector< double > regularization_size
std::vector< casadi_int > ls_trials
std::vector< double > inf_du
UnifiedReturnStatus unified_return_status
casadi_nlpsol_data< double > d_nlp
Options metadata for a class.
std::map< std::string, FStats > fstats