bonmin_interface.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 
26 
27 #include "bonmin_interface.hpp"
28 #include "bonmin_nlp.hpp"
29 #include "casadi/core/casadi_misc.hpp"
30 #include "../../core/global_options.hpp"
31 #include "../../core/casadi_interrupt.hpp"
32 
33 #include <ctime>
34 #include <stdlib.h>
35 #include <iostream>
36 #include <iomanip>
37 #include <chrono>
38 
39 namespace casadi {
40  extern "C"
41  int CASADI_NLPSOL_BONMIN_EXPORT
42  casadi_register_nlpsol_bonmin(Nlpsol::Plugin* plugin) {
43  plugin->creator = BonminInterface::creator;
44  plugin->name = "bonmin";
45  plugin->doc = BonminInterface::meta_doc.c_str();
46  plugin->version = CASADI_VERSION;
47  plugin->options = &BonminInterface::options_;
48  plugin->deserialize = &BonminInterface::deserialize;
49  return 0;
50  }
51 
52  extern "C"
53  void CASADI_NLPSOL_BONMIN_EXPORT casadi_load_nlpsol_bonmin() {
55  }
56 
 // Constructor: forward the problem definition to the Nlpsol base class;
 // all actual configuration is deferred to init().
 57  BonminInterface::BonminInterface(const std::string& name, const Function& nlp)
 58  : Nlpsol(name, nlp) {
 59  }
60 
62  clear_mem();
63  }
64 
66  = {{&Nlpsol::options_},
67  {{"pass_nonlinear_variables",
68  {OT_BOOL,
69  "Pass list of variables entering nonlinearly to BONMIN"}},
70  {"pass_nonlinear_constraints",
71  {OT_BOOL,
72  "Pass list of constraints entering nonlinearly to BONMIN"}},
73  {"bonmin",
74  {OT_DICT,
75  "Options to be passed to BONMIN"}},
76  {"var_string_md",
77  {OT_DICT,
78  "String metadata (a dictionary with lists of strings) "
79  "about variables to be passed to BONMIN"}},
80  {"var_integer_md",
81  {OT_DICT,
82  "Integer metadata (a dictionary with lists of integers) "
83  "about variables to be passed to BONMIN"}},
84  {"var_numeric_md",
85  {OT_DICT,
86  "Numeric metadata (a dictionary with lists of reals) about "
87  "variables to be passed to BONMIN"}},
88  {"con_string_md",
89  {OT_DICT,
90  "String metadata (a dictionary with lists of strings) about "
91  "constraints to be passed to BONMIN"}},
92  {"con_integer_md",
93  {OT_DICT,
94  "Integer metadata (a dictionary with lists of integers) "
95  "about constraints to be passed to BONMIN"}},
96  {"con_numeric_md",
97  {OT_DICT,
98  "Numeric metadata (a dictionary with lists of reals) about "
99  "constraints to be passed to BONMIN"}},
100  {"hess_lag",
101  {OT_FUNCTION,
102  "Function for calculating the Hessian of the Lagrangian (autogenerated by default)"}},
103  {"hess_lag_options",
104  {OT_DICT,
105  "Options for the autogenerated Hessian of the Lagrangian."}},
106  {"jac_g",
107  {OT_FUNCTION,
108  "Function for calculating the Jacobian of the constraints "
109  "(autogenerated by default)"}},
110  {"jac_g_options",
111  {OT_DICT,
112  "Options for the autogenerated Jacobian of the constraints."}},
113  {"grad_f",
114  {OT_FUNCTION,
115  "Function for calculating the gradient of the objective "
116  "(column, autogenerated by default)"}},
117  {"grad_f_options",
118  {OT_DICT,
119  "Options for the autogenerated gradient of the objective."}},
120  {"sos1_groups",
122  "Options for the autogenerated gradient of the objective."}},
123  {"sos1_weights",
125  "Options for the autogenerated gradient of the objective."}},
126  {"sos1_priorities",
127  {OT_INTVECTOR,
128  "Options for the autogenerated gradient of the objective."}},
129  }
130  };
131 
 // Initialize the solver instance: read user options, set up the oracle
 // functions (objective, constraints, derivatives), build the SOS1
 // bookkeeping and reserve the persistent work memory.
132  void BonminInterface::init(const Dict& opts) {
133  // Call the init method of the base class
134  Nlpsol::init(opts);
135 
136  // Default options
 // NOTE(review): original source lines 137-138 are missing from this dump;
 // the defaults for pass_nonlinear_variables_/pass_nonlinear_constraints_
 // were presumably set there - confirm against the repository.
139  Dict hess_lag_options, jac_g_options, grad_f_options;
140 
141  std::vector< std::vector<int> > sos1_groups;
142  std::vector< std::vector<double> > sos1_weights;
143  // Read user options
144  for (auto&& op : opts) {
145  if (op.first=="bonmin") {
146  opts_ = op.second;
147  } else if (op.first=="pass_nonlinear_variables") {
148  pass_nonlinear_variables_ = op.second;
149  } else if (op.first=="pass_nonlinear_constraints") {
150  pass_nonlinear_constraints_ = op.second;
151  } else if (op.first=="var_string_md") {
152  var_string_md_ = op.second;
153  } else if (op.first=="var_integer_md") {
154  var_integer_md_ = op.second;
155  } else if (op.first=="var_numeric_md") {
156  var_numeric_md_ = op.second;
157  } else if (op.first=="con_string_md") {
158  con_string_md_ = op.second;
159  } else if (op.first=="con_integer_md") {
160  con_integer_md_ = op.second;
161  } else if (op.first=="con_numeric_md") {
162  con_numeric_md_ = op.second;
163  } else if (op.first=="hess_lag_options") {
164  hess_lag_options = op.second;
165  } else if (op.first=="jac_g_options") {
166  jac_g_options = op.second;
167  } else if (op.first=="grad_f_options") {
168  grad_f_options = op.second;
169  } else if (op.first=="hess_lag") {
 // User-supplied Hessian of the Lagrangian: expects 4 inputs, 1 output
170  Function f = op.second;
171  casadi_assert_dev(f.n_in()==4);
172  casadi_assert_dev(f.n_out()==1);
173  set_function(f, "nlp_hess_l");
174  } else if (op.first=="jac_g") {
175  Function f = op.second;
176  casadi_assert_dev(f.n_in()==2);
177  casadi_assert_dev(f.n_out()==2);
178  set_function(f, "nlp_jac_g");
179  } else if (op.first=="grad_f") {
180  Function f = op.second;
181  casadi_assert_dev(f.n_in()==2);
182  casadi_assert_dev(f.n_out()==2);
183  set_function(f, "nlp_grad_f");
184  } else if (op.first=="sos1_groups") {
185  sos1_groups = to_int(op.second.to_int_vector_vector());
 // Shift user-facing indices to internal zero-based indices
186  for (auto & g : sos1_groups) {
187  for (auto & e : g) e-= GlobalOptions::start_index;
188  }
189  } else if (op.first=="sos1_weights") {
190  sos1_weights = op.second.to_double_vector_vector();
191  } else if (op.first=="sos1_priorities") {
192  sos1_priorities_ = to_int(op.second.to_int_vector());
193  }
194  }
195 
196  // Do we need second order derivatives?
197  exact_hessian_ = true;
198  auto hessian_approximation = opts_.find("hessian_approximation");
199  if (hessian_approximation!=opts_.end()) {
200  exact_hessian_ = hessian_approximation->second == "exact";
201  }
202 
203 
204  // Setup NLP functions
205  create_function("nlp_f", {"x", "p"}, {"f"});
206  create_function("nlp_g", {"x", "p"}, {"g"});
 // Autogenerate gradient/Jacobian oracles unless the user supplied them
207  if (!has_function("nlp_grad_f")) {
208  create_function("nlp_grad_f", {"x", "p"}, {"f", "grad:f:x"});
209  }
210  if (!has_function("nlp_jac_g")) {
211  create_function("nlp_jac_g", {"x", "p"}, {"g", "jac:g:x"});
212  }
213  jacg_sp_ = get_function("nlp_jac_g").sparsity_out(1);
214 
215  // By default, assume all nonlinear
216  nl_ex_.resize(nx_, true);
217  nl_g_.resize(ng_, true);
218 
219  // Allocate temporary work vectors
220  if (exact_hessian_) {
221  if (!has_function("nlp_hess_l")) {
222  create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
223  {"triu:hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
224  }
225  hesslag_sp_ = get_function("nlp_hess_l").sparsity_out(0);
226 
 // NOTE(review): original source line 227 is missing here (likely the
 // opening `if (pass_nonlinear_variables_) {`) - confirm against the
 // repository. Variables with an empty Hessian column are linear.
228  const casadi_int* col = hesslag_sp_.colind();
229  for (casadi_int i=0;i<nx_;++i) nl_ex_[i] = col[i+1]-col[i];
230  }
231  } else {
 // NOTE(review): original source lines 232 and 235 are missing (the
 // conditions on pass_nonlinear_variables_/pass_nonlinear_constraints_,
 // presumably) - confirm against the repository.
233  nl_ex_ = oracle_.which_depends("x", {"f", "g"}, 2, false);
234  }
236  nl_g_ = oracle_.which_depends("x", {"g"}, 2, true);
237 
238  // Create sos info
239 
240  // Declare size
241  sos_num_ = sos1_groups.size();
242  // sos1 type
243  sos1_types_.resize(sos_num_, 1);
244 
 // Weight/priority lists, when given, must have one entry per group
245  casadi_assert(sos1_weights.empty() || sos1_weights.size()==sos_num_,
246  "sos1_weights has incorrect size");
247  casadi_assert(sos1_priorities_.empty() || sos1_priorities_.size()==sos_num_,
248  "sos1_priorities has incorrect size");
249  if (sos1_priorities_.empty()) sos1_priorities_.resize(sos_num_, 1);
250 
 // Flatten the per-group data into CSR-style arrays (starts/indices/weights)
251  sos_num_nz_ = 0;
252  for (casadi_int i=0;i<sos_num_;++i) {
253  // get local group
254  const std::vector<int>& sos1_group = sos1_groups[i];
255 
256  // Get local weights
257  std::vector<double> default_weights(sos1_group.size(), 1.0);
258  const std::vector<double>& sos1_weight =
259  sos1_weights.empty() ? default_weights : sos1_weights[i];
260  casadi_assert(sos1_weight.size()==sos1_group.size(),
261  "sos1_weights has incorrect size");
262 
263  // Populate lookup vector
264  sos1_starts_.push_back(sos_num_nz_);
265  sos_num_nz_+=sos1_group.size();
266 
267  sos1_weights_.insert(sos1_weights_.end(), sos1_weight.begin(), sos1_weight.end());
268  sos1_indices_.insert(sos1_indices_.end(), sos1_group.begin(), sos1_group.end());
269  }
270 
 // Sentinel so that group i spans [starts[i], starts[i+1])
271  sos1_starts_.push_back(sos_num_nz_);
272 
273  // Allocate work vectors
274  alloc_w(nx_, true); // xk_
275  alloc_w(nx_, true); // lam_xk_
276  alloc_w(ng_, true); // gk_
277  alloc_w(nx_, true); // grad_fk_
278  alloc_w(jacg_sp_.nnz(), true); // jac_gk_
279  if (exact_hessian_) {
280  alloc_w(hesslag_sp_.nnz(), true); // hess_lk_
281  }
282  }
283 
284  int BonminInterface::init_mem(void* mem) const {
285 
286  auto m = static_cast<BonminMemory*>(mem);
287  m->sos_info.num = sos_num_;
288  m->sos_info.numNz = sos_num_nz_;
289  // sos_info takes ownership of passed-in pointers
290  m->sos_info.types = new char[sos_num_];
291  m->sos_info.priorities = new int[sos_num_];
292  m->sos_info.starts = new int[sos_num_ + 1];
293  m->sos_info.indices = new int[sos_num_nz_];
294  m->sos_info.weights = new double[sos_num_nz_];
295  casadi_assert_dev(sos_num_==sos1_types_.size());
296  casadi_assert_dev(sos_num_==sos1_priorities_.size());
297  casadi_assert_dev(sos_num_+1==sos1_starts_.size());
298  casadi_assert_dev(sos_num_nz_==sos1_indices_.size());
299  casadi_assert_dev(sos_num_nz_==sos1_weights_.size());
300  std::copy(sos1_types_.begin(), sos1_types_.end(), m->sos_info.types);
301  std::copy(sos1_priorities_.begin(), sos1_priorities_.end(), m->sos_info.priorities);
302  std::copy(sos1_starts_.begin(), sos1_starts_.end(), m->sos_info.starts);
303  std::copy(sos1_indices_.begin(), sos1_indices_.end(), m->sos_info.indices);
304  std::copy(sos1_weights_.begin(), sos1_weights_.end(), m->sos_info.weights);
305 
306  return Nlpsol::init_mem(mem);
307  }
308 
309  void BonminInterface::set_work(void* mem, const double**& arg, double**& res,
310  casadi_int*& iw, double*& w) const {
311  auto m = static_cast<BonminMemory*>(mem);
312 
313  // Set work in base classes
314  Nlpsol::set_work(mem, arg, res, iw, w);
315 
316  // Work vectors
317  m->gk = w; w += ng_;
318  m->grad_fk = w; w += nx_;
319  m->jac_gk = w; w += jacg_sp_.nnz();
320  if (exact_hessian_) {
321  m->hess_lk = w; w += hesslag_sp_.nnz();
322  }
323  }
324 
325  inline const char* return_status_string(Bonmin::TMINLP::SolverReturn status) {
326  switch (status) {
327  case Bonmin::TMINLP::MINLP_ERROR:
328  return "MINLP_ERROR";
329  case Bonmin::TMINLP::SUCCESS:
330  return "SUCCESS";
331  case Bonmin::TMINLP::INFEASIBLE:
332  return "INFEASIBLE";
333  case Bonmin::TMINLP::CONTINUOUS_UNBOUNDED:
334  return "CONTINUOUS_UNBOUNDED";
335  case Bonmin::TMINLP::LIMIT_EXCEEDED:
336  return "LIMIT_EXCEEDED";
337  case Bonmin::TMINLP::USER_INTERRUPT:
338  return "USER_INTERRUPT";
339  }
340  return "Unknown";
341  }
342 
343  inline std::string to_str(const CoinError& e) {
344  std::stringstream ss;
345  if (e.lineNumber()<0) {
346  ss << e.message()<< " in "<< e.className()<< "::" << e.methodName();
347  } else {
348  ss << e.fileName() << ":" << e.lineNumber() << " method " << e.methodName()
349  << " : assertion \'" << e.message() <<"\' failed.";
350  if (!e.className().empty())
351  ss <<"Possible reason: "<< e.className();
352  }
353  return ss.str();
354  }
355 
 // Render a Bonmin TNLPSolver::UnsolvedError as a string by letting the
 // error object print itself into a local buffer. (Non-const reference:
 // presumably printError is not const - confirm against Bonmin headers.)
356  inline std::string to_str(TNLPSolver::UnsolvedError& e) {
357  std::stringstream ss;
358  e.printError(ss);
359  return ss.str();
360  }
361 
362 
 // CoinMessageHandler subclass that reroutes BONMIN/COIN console output
 // through casadi::uout(), so printing follows CasADi's stream handling.
368  class BonMinMessageHandler : public CoinMessageHandler {
369  public:
370  BonMinMessageHandler() { }
 // NOTE(review): original source line 371 (likely a comment) is missing
 // from this dump.
 // Flush the accumulated message buffer via uout()
372  int print() override {
373  uout() << messageBuffer_ << std::endl;
374  return 0;
375  }
376  ~BonMinMessageHandler() override { }
 // Copy construction/assignment delegate to the COIN base class
377  BonMinMessageHandler(const BonMinMessageHandler &other): CoinMessageHandler(other) {}
378  BonMinMessageHandler(const CoinMessageHandler &other): CoinMessageHandler(other) {}
379  BonMinMessageHandler & operator=(const BonMinMessageHandler &rhs) {
380  CoinMessageHandler::operator=(rhs);
381  return *this;
382  }
 // Polymorphic copy used by COIN when it duplicates handlers
383  CoinMessageHandler* clone() const override {
384  return new BonMinMessageHandler(*this);
385  }
386  };
387 
 // Solve the MINLP: build a fresh BONMIN application, forward the stored
 // options, run branch-and-bound, and copy results into the output buffers.
388  int BonminInterface::solve(void* mem) const {
389  auto m = static_cast<BonminMemory*>(mem);
390  auto d_nlp = &m->d_nlp;
391 
392  // Reset statistics
393  m->inf_pr.clear();
394  m->inf_du.clear();
395  m->mu.clear();
396  m->d_norm.clear();
397  m->regularization_size.clear();
398  m->alpha_pr.clear();
399  m->alpha_du.clear();
400  m->obj.clear();
401  m->ls_trials.clear();
402 
403  // Reset number of iterations
404  m->n_iter = 0;
406  // MINLP instance
407  SmartPtr<BonminUserClass> tminlp = new BonminUserClass(*this, m);
408 
 // Message handler that redirects all BONMIN printing to casadi::uout()
409  BonMinMessageHandler mh;
410 
411  // Start an BONMIN application
412  BonminSetup bonmin(&mh);
413 
414  SmartPtr<OptionsList> options = new OptionsList();
415  SmartPtr<Journalist> journalist= new Journalist();
416  SmartPtr<Bonmin::RegisteredOptions> roptions = new Bonmin::RegisteredOptions();
417 
418  {
419  // Direct output through casadi::uout()
420  StreamJournal* jrnl_raw = new StreamJournal("console", J_ITERSUMMARY);
421  jrnl_raw->SetOutputStream(&casadi::uout());
422  jrnl_raw->SetPrintLevel(J_DBG, J_NONE);
423  SmartPtr<Journal> jrnl = jrnl_raw;
424  journalist->AddJournal(jrnl);
425  }
426 
427  options->SetJournalist(journalist);
428  options->SetRegisteredOptions(roptions);
429  bonmin.setOptionsAndJournalist(roptions, options, journalist);
430  bonmin.registerOptions();
431  // Get all options available in BONMIN
432  auto regops = bonmin.roptions()->RegisteredOptionsList();
433 
434  // Pass all the options to BONMIN
435  for (auto&& op : opts_) {
436  // Find the option
437  auto regops_it = regops.find(op.first);
438  if (regops_it==regops.end()) {
439  casadi_error("No such BONMIN option: " + op.first);
440  }
441 
442  // Get the type
443  Ipopt::RegisteredOptionType ipopt_type = regops_it->second->Type();
444 
445  // Pass to BONMIN
 // Dispatch on the registered option type; unknown types are skipped
 // with a warning rather than aborting the solve
446  bool ret;
447  switch (ipopt_type) {
448  case Ipopt::OT_Number:
449  ret = bonmin.options()->SetNumericValue(op.first, op.second.to_double(), false);
450  break;
451  case Ipopt::OT_Integer:
452  ret = bonmin.options()->SetIntegerValue(op.first, op.second.to_int(), false);
453  break;
454  case Ipopt::OT_String:
455  ret = bonmin.options()->SetStringValue(op.first, op.second.to_string(), false);
456  break;
457  case Ipopt::OT_Unknown:
458  default:
459  casadi_warning("Cannot handle option \"" + op.first + "\", ignored");
460  continue;
461  }
462  if (!ret) casadi_error("Invalid options were detected by BONMIN.");
463  }
464 
465  // Initialize
466  bonmin.initialize(GetRawPtr(tminlp));
467 
468  // Branch-and-bound
 // Solution data is delivered through the finalize_solution() callback
469  try {
470  Bab bb;
471  bb(bonmin);
472  } catch (CoinError& e) {
473  casadi_error("CoinError occured: " + to_str(e));
474  } catch (TNLPSolver::UnsolvedError& e) {
475  casadi_error("TNLPSolver::UnsolvedError occured" + to_str(e));
476  } catch (...) {
477  casadi_error("Uncaught error in Bonmin");
478  }
479 
480  // Save results to outputs
481  casadi_copy(m->gk, ng_, d_nlp->z + nx_);
482  return 0;
483  }
484 
 // Per-iteration hook invoked by BONMIN: records convergence statistics
 // and, when the user registered a callback Function, evaluates it.
 // Returns nonzero to continue the solve, zero to request termination.
 // NOTE(review): the line carrying the return type/qualified name
 // (original source line 485, presumably `bool BonminInterface::`) is
 // missing from this dump.
486  intermediate_callback(BonminMemory* m, const double* x, const double* z_L, const double* z_U,
487  const double* g, const double* lambda, double obj_value, int iter,
488  double inf_pr, double inf_du, double mu, double d_norm,
489  double regularization_size, double alpha_du, double alpha_pr,
490  int ls_trials, bool full_callback) const {
491  auto d_nlp = &m->d_nlp;
492  m->n_iter += 1;
493  try {
494  if (verbose_) casadi_message("intermediate_callback started");
 // Accumulate per-iteration statistics for later retrieval via stats()
495  m->inf_pr.push_back(inf_pr);
496  m->inf_du.push_back(inf_du);
497  m->mu.push_back(mu);
498  m->d_norm.push_back(d_norm);
499  m->regularization_size.push_back(regularization_size);
500  m->alpha_pr.push_back(alpha_pr);
501  m->alpha_du.push_back(alpha_du);
502  m->ls_trials.push_back(ls_trials);
503  m->obj.push_back(obj_value);
504  if (!fcallback_.is_null()) {
505  ScopedTiming tic(m->fstats.at("callback_fun"));
506  if (full_callback) {
 // Full information available: copy primal/dual iterates into the
 // nlpsol buffers so the user callback can inspect them
507  casadi_copy(x, nx_, d_nlp->z);
508  for (casadi_int i=0; i<nx_; ++i) {
 // Combine bound multipliers into a single signed multiplier
509  d_nlp->lam[i] = z_U[i]-z_L[i];
510  }
511  casadi_copy(lambda, ng_, d_nlp->lam + nx_);
512  casadi_copy(g, ng_, m->gk);
513  } else {
514  if (iter==0) {
515  uerr()
516  << "Warning: intermediate_callback is disfunctional in your installation. "
517  "You will only be able to use stats(). "
518  "See https://github.com/casadi/casadi/wiki/enableBonminCallback to enable it."
519  << std::endl;
520  }
521  }
522 
523  // Inputs
524  std::fill_n(m->arg, fcallback_.n_in(), nullptr);
525  if (full_callback) {
526  // The values used below are meaningless
527  // when not doing a full_callback
528  m->arg[NLPSOL_X] = x;
529  m->arg[NLPSOL_F] = &obj_value;
530  m->arg[NLPSOL_G] = g;
531  m->arg[NLPSOL_LAM_P] = nullptr;
532  m->arg[NLPSOL_LAM_X] = d_nlp->lam;
533  m->arg[NLPSOL_LAM_G] = d_nlp->lam + nx_;
534  }
535 
536  // Outputs
537  std::fill_n(m->res, fcallback_.n_out(), nullptr);
538  double ret_double;
539  m->res[0] = &ret_double;
540 
541  fcallback_(m->arg, m->res, m->iw, m->w, 0);
 // A nonzero return value from the user callback requests termination
542  int ret = static_cast<int>(ret_double);
543 
544  m->fstats.at("callback_fun").toc();
545  return !ret;
546  } else {
547  return 1;
548  }
549  } catch(KeyboardInterruptException& ex) {
 // Ctrl-C: stop the solver without escalating an error
550  return 0;
551  } catch(std::exception& ex) {
552  casadi_warning("intermediate_callback: " + std::string(ex.what()));
553  if (iteration_callback_ignore_errors_) return 1;
554  return 0;
555  }
556  }
557 
 // Called once by BONMIN when branch-and-bound terminates: stores the
 // incumbent solution and the interpreted return status in the memory block.
 // NOTE(review): the line carrying the return type/qualified name
 // (original source line 558) is missing from this dump.
559  finalize_solution(BonminMemory* m, TMINLP::SolverReturn status,
560  const double* x, double obj_value) const {
561  auto d_nlp = &m->d_nlp;
562  try {
563  // Get primal solution
564  casadi_copy(x, nx_, d_nlp->z);
565 
566  // Get optimal cost
567  d_nlp->objective = obj_value;
568 
569  // Dual solution not calculated
570  casadi_fill(d_nlp->lam, nx_ + ng_, nan);
571 
572  // Get the constraints
 // (filled with NaN: constraint values are not reported here)
573  casadi_fill(m->gk, ng_, nan);
574 
575  // Get statistics
576  m->iter_count = 0;
577 
578  // Interpret return code
579  m->return_status = return_status_string(status);
580  m->success = status==Bonmin::TMINLP::SUCCESS;
 // Map resource-limit termination onto the unified "limited" status
581  if (status==Bonmin::TMINLP::LIMIT_EXCEEDED) m->unified_return_status = SOLVER_RET_LIMITED;
582  } catch(std::exception& ex) {
 // Swallow exceptions: this executes inside a BONMIN callback
583  uerr() << "finalize_solution failed: " << ex.what() << std::endl;
584  }
585  }
586 
 // Hand BONMIN the SOS1 constraint description assembled in init_mem().
587  const TMINLP::SosInfo& BonminInterface::sosConstraints(BonminMemory* m) const {
588  return m->sos_info;
589  }
590 
 // Copy variable and constraint bounds into BONMIN's arrays.
 // NOTE(review): the return-type line (original source line 591,
 // presumably `bool BonminInterface::`) is missing from this dump.
592  get_bounds_info(BonminMemory* m, double* x_l, double* x_u,
593  double* g_l, double* g_u) const {
594  auto d_nlp = &m->d_nlp;
595  try {
 // lbz/ubz hold bounds for x (first nx_ entries) followed by g (next ng_)
596  casadi_copy(d_nlp->lbz, nx_, x_l);
597  casadi_copy(d_nlp->ubz, nx_, x_u);
598  casadi_copy(d_nlp->lbz+nx_, ng_, g_l);
599  casadi_copy(d_nlp->ubz+nx_, ng_, g_u);
600  return true;
601  } catch(std::exception& ex) {
602  uerr() << "get_bounds_info failed: " << ex.what() << std::endl;
603  return false;
604  }
605  }
606 
 // Provide the initial guess to BONMIN: primal variables and, when
 // requested, multipliers for simple bounds and nonlinear constraints.
 // NOTE(review): the return-type line (original source line 607) is
 // missing from this dump.
608  get_starting_point(BonminMemory* m, bool init_x, double* x,
609  bool init_z, double* z_L, double* z_U,
610  bool init_lambda, double* lambda) const {
611  auto d_nlp = &m->d_nlp;
612  try {
613  // Initialize primal variables
614  if (init_x) {
615  casadi_copy(d_nlp->z, nx_, x);
616  }
617 
618  // Initialize dual variables (simple bounds)
 // Split the signed multiplier into nonnegative lower/upper parts
619  if (init_z) {
620  for (casadi_int i=0; i<nx_; ++i) {
621  z_L[i] = std::max(0., -d_nlp->lam[i]);
622  z_U[i] = std::max(0., d_nlp->lam[i]);
623  }
624  }
625 
626  // Initialize dual variables (nonlinear bounds)
627  if (init_lambda) {
628  casadi_copy(d_nlp->lam + nx_, ng_, lambda);
629  }
630 
631  return true;
632  } catch(std::exception& ex) {
633  uerr() << "get_starting_point failed: " << ex.what() << std::endl;
634  return false;
635  }
636  }
637 
638  void BonminInterface::get_nlp_info(BonminMemory* m, int& nx, int& ng,
639  int& nnz_jac_g, int& nnz_h_lag) const {
640  try {
641  // Number of variables
642  nx = nx_;
643 
644  // Number of constraints
645  ng = ng_;
646 
647  // Number of Jacobian nonzeros
648  nnz_jac_g = ng_==0 ? 0 : jacg_sp_.nnz();
649 
650  // Number of Hessian nonzeros (only upper triangular half)
651  nnz_h_lag = exact_hessian_ ? hesslag_sp_.nnz() : 0;
652 
653  } catch(std::exception& ex) {
654  uerr() << "get_nlp_info failed: " << ex.what() << std::endl;
655  }
656  }
657 
 // NOTE(review): this function is mutilated in this dump - its header
 // (original source line 658) and the opening branch on line 660
 // (presumably a test on exact_hessian_/pass_nonlinear_variables_) are
 // missing, leaving a dangling `else`. Confirm against the repository
 // before editing.
659  try {
661  // No Hessian has been interfaced
 // -1 tells BONMIN that the nonlinear-variable list is unavailable
662  return -1;
663  } else {
664  // Number of variables that appear nonlinearly
665  int nv = 0;
666  for (auto&& i : nl_ex_) if (i) nv++;
667  return nv;
668  }
669  } catch(std::exception& ex) {
670  uerr() << "get_number_of_nonlinear_variables failed: " << ex.what() << std::endl;
671  return -1;
672  }
673  }
674 
 // Fill pos_nonlin_vars with the indices of the variables flagged as
 // nonlinear in nl_ex_ (computed in init()).
 // NOTE(review): the return-type line (original source line 675,
 // presumably `bool BonminInterface::`) is missing from this dump.
676  get_list_of_nonlinear_variables(int num_nonlin_vars, int* pos_nonlin_vars) const {
677  try {
678  for (casadi_int i=0; i<nl_ex_.size(); ++i) {
679  if (nl_ex_[i]) *pos_nonlin_vars++ = i;
680  }
681  return true;
682  } catch(std::exception& ex) {
683  uerr() << "get_list_of_nonlinear_variables failed: " << ex.what() << std::endl;
684  return false;
685  }
686  }
687 
689  this->return_status = "Unset";
690  }
691 
693  }
694 
695  Dict BonminInterface::get_stats(void* mem) const {
696  Dict stats = Nlpsol::get_stats(mem);
697  auto m = static_cast<BonminMemory*>(mem);
698  stats["return_status"] = m->return_status;
699  stats["iter_count"] = m->iter_count;
700  return stats;
701  }
702 
 // Deserializing constructor body: restore all members in the exact order
 // written by serialize_body().
 // NOTE(review): the constructor header (original source lines ~702-703)
 // is missing from this dump.
704  s.version("BonminInterface", 1);
705  s.unpack("BonminInterface::jacg_sp", jacg_sp_);
706  s.unpack("BonminInterface::hesslag_sp", hesslag_sp_);
707  s.unpack("BonminInterface::exact_hessian", exact_hessian_);
708  s.unpack("BonminInterface::opts", opts_);
709 
710  s.unpack("BonminInterface::sos1_weights", sos1_weights_);
711  s.unpack("BonminInterface::sos1_indices", sos1_indices_);
712  s.unpack("BonminInterface::sos1_priorities", sos1_priorities_);
713  s.unpack("BonminInterface::sos1_starts", sos1_starts_);
714  s.unpack("BonminInterface::sos1_types", sos1_types_);
 // NOTE(review): sos1_types is unpacked twice. This mirrors the double
 // pack in serialize_body(), so archives still round-trip, but the
 // duplication should be removed from both sides together - removing it
 // from only one side would break serialization compatibility.
715  s.unpack("BonminInterface::sos1_types", sos1_types_);
716  s.unpack("BonminInterface::sos_num", sos_num_);
717  s.unpack("BonminInterface::sos_num_nz", sos_num_nz_);
718 
719  s.unpack("BonminInterface::pass_nonlinear_variables", pass_nonlinear_variables_);
720  s.unpack("BonminInterface::pass_nonlinear_constraints", pass_nonlinear_constraints_);
721  s.unpack("BonminInterface::nl_ex", nl_ex_);
722  s.unpack("BonminInterface::nl_g", nl_g_);
723  s.unpack("BonminInterface::var_string_md", var_string_md_);
724  s.unpack("BonminInterface::var_integer_md", var_integer_md_);
725  s.unpack("BonminInterface::var_numeric_md", var_numeric_md_);
726  s.unpack("BonminInterface::con_string_md", con_string_md_);
727  s.unpack("BonminInterface::con_integer_md", con_integer_md_);
728  s.unpack("BonminInterface::con_numeric_md", con_numeric_md_);
729  }
730 
 // serialize_body: write all members; the deserializing constructor reads
 // them back in the same order.
 // NOTE(review): the function header (original source lines ~731-732) is
 // missing from this dump.
733  s.version("BonminInterface", 1);
734  s.pack("BonminInterface::jacg_sp", jacg_sp_);
735  s.pack("BonminInterface::hesslag_sp", hesslag_sp_);
736  s.pack("BonminInterface::exact_hessian", exact_hessian_);
737  s.pack("BonminInterface::opts", opts_);
738 
739  s.pack("BonminInterface::sos1_weights", sos1_weights_);
740  s.pack("BonminInterface::sos1_indices", sos1_indices_);
741  s.pack("BonminInterface::sos1_priorities", sos1_priorities_);
742  s.pack("BonminInterface::sos1_starts", sos1_starts_);
743  s.pack("BonminInterface::sos1_types", sos1_types_);
 // NOTE(review): sos1_types is packed twice. The deserializer unpacks it
 // twice as well, so archives round-trip; deduplicate both sides together
 // (and bump the version) rather than changing only one side.
744  s.pack("BonminInterface::sos1_types", sos1_types_);
745  s.pack("BonminInterface::sos_num", sos_num_);
746  s.pack("BonminInterface::sos_num_nz", sos_num_nz_);
747 
748  s.pack("BonminInterface::pass_nonlinear_variables", pass_nonlinear_variables_);
749  s.pack("BonminInterface::pass_nonlinear_constraints", pass_nonlinear_constraints_);
750  s.pack("BonminInterface::nl_ex", nl_ex_);
751  s.pack("BonminInterface::nl_g", nl_g_);
752  s.pack("BonminInterface::var_string_md", var_string_md_);
753  s.pack("BonminInterface::var_integer_md", var_integer_md_);
754  s.pack("BonminInterface::var_numeric_md", var_numeric_md_);
755  s.pack("BonminInterface::con_string_md", con_string_md_);
756  s.pack("BonminInterface::con_integer_md", con_integer_md_);
757  s.pack("BonminInterface::con_numeric_md", con_numeric_md_);
758  }
759 
760 } // namespace casadi
BonminInterface(const std::string &name, const Function &nlp)
std::vector< int > sos1_starts_
std::vector< char > sos1_types_
int init_mem(void *mem) const override
Initialize memory block.
std::vector< double > sos1_weights_
Sos constraints information.
int solve(void *mem) const override
bool exact_hessian_
Exact Hessian?
Dict opts_
All BONMIN options.
std::vector< bool > nl_g_
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
static Nlpsol * creator(const std::string &name, const Function &nlp)
Create a new NLP Solver.
bool get_list_of_nonlinear_variables(int num_nonlin_vars, int *pos_nonlin_vars) const
void finalize_solution(BonminMemory *m, Bonmin::TMINLP::SolverReturn status, const double *x, double obj_value) const
static const Options options_
Options.
Dict get_stats(void *mem) const override
Get all statistics.
void init(const Dict &opts) override
Initialize.
const Bonmin::TMINLP::SosInfo & sosConstraints(BonminMemory *m) const
std::vector< int > sos1_indices_
bool get_starting_point(BonminMemory *m, bool init_x, double *x, bool init_z, double *z_L, double *z_U, bool init_lambda, double *lambda) const
int get_number_of_nonlinear_variables() const
std::vector< bool > nl_ex_
std::vector< int > sos1_priorities_
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
void get_nlp_info(BonminMemory *m, int &nx, int &ng, int &nnz_jac_g, int &nnz_h_lag) const
bool intermediate_callback(BonminMemory *m, const double *x, const double *z_L, const double *z_U, const double *g, const double *lambda, double obj_value, int iter, double inf_pr, double inf_du, double mu, double d_norm, double regularization_size, double alpha_du, double alpha_pr, int ls_trials, bool full_callback) const
bool get_bounds_info(BonminMemory *m, double *x_l, double *x_u, double *g_l, double *g_u) const
static const std::string meta_doc
A documentation string.
const char * what() const override
Display error.
Definition: exception.hpp:90
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
Function object.
Definition: function.hpp:60
std::vector< bool > which_depends(const std::string &s_in, const std::vector< std::string > &s_out, casadi_int order=1, bool tr=false) const
Which variables enter with some order.
Definition: function.cpp:1826
casadi_int n_out() const
Get the number of function outputs.
Definition: function.cpp:823
casadi_int n_in() const
Get the number of function inputs.
Definition: function.cpp:819
bool is_null() const
Is a null pointer?
static casadi_int start_index
NLP solver storage class.
Definition: nlpsol_impl.hpp:59
bool iteration_callback_ignore_errors_
Options.
Definition: nlpsol_impl.hpp:95
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1162
static const Options options_
Options.
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:420
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int init_mem(void *mem) const override
Initialize memory block.
Definition: nlpsol.cpp:603
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1306
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:795
Function fcallback_
callback function, executed at each iteration
Definition: nlpsol_impl.hpp:75
void set_function(const Function &fcn, const std::string &fname, bool jit=false)
Function oracle_
Oracle: Used to generate other functions.
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
std::vector< std::string > get_function() const override
Get list of dependency functions.
bool has_function(const std::string &fname) const override
static void registerPlugin(const Plugin &plugin, bool needs_lock=true)
Register an integrator in the factory.
bool verbose_
Verbose printout.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
casadi_int nnz() const
Get the number of (structural) non-zeros.
Definition: sparsity.cpp:148
const casadi_int * colind() const
Get a reference to the colindex of all column element (see class description)
Definition: sparsity.cpp:168
The casadi namespace.
Definition: archiver.cpp:28
@ NLPSOL_G
Constraints function at the optimal solution (ng x 1)
Definition: nlpsol.hpp:221
@ NLPSOL_X
Decision variables at the optimal solution (nx x 1)
Definition: nlpsol.hpp:217
@ NLPSOL_LAM_P
Lagrange multipliers for bounds on P at the solution (np x 1)
Definition: nlpsol.hpp:227
@ NLPSOL_F
Cost function value at the optimal solution (1 x 1)
Definition: nlpsol.hpp:219
@ NLPSOL_LAM_G
Lagrange multipliers for bounds on G at the solution (ng x 1)
Definition: nlpsol.hpp:225
@ NLPSOL_LAM_X
Lagrange multipliers for bounds on X at the solution (nx x 1)
Definition: nlpsol.hpp:223
std::string to_str(const CoinError &e)
std::ostream & uerr()
int to_int(casadi_int rhs)
Definition: casadi_misc.cpp:56
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <-x.
void casadi_fill(T1 *x, casadi_int n, T1 alpha)
FILL: x <- alpha.
const char * return_status_string(Bonmin::TMINLP::SolverReturn status)
int CASADI_NLPSOL_BONMIN_EXPORT casadi_register_nlpsol_bonmin(Nlpsol::Plugin *plugin)
@ OT_INTVECTOR
@ OT_DOUBLEVECTORVECTOR
@ OT_INTVECTORVECTOR
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
const double nan
Not a number.
Definition: calculus.hpp:53
void CASADI_NLPSOL_BONMIN_EXPORT casadi_load_nlpsol_bonmin()
std::ostream & uout()
@ SOLVER_RET_LIMITED
std::vector< double > mu
std::vector< double > obj
std::vector< double > inf_pr
std::vector< double > alpha_du
std::vector< double > d_norm
std::vector< double > alpha_pr
Bonmin::TMINLP::SosInfo sos_info
std::vector< double > regularization_size
std::vector< casadi_int > ls_trials
std::vector< double > inf_du
UnifiedReturnStatus unified_return_status
Definition: nlpsol_impl.hpp:48
casadi_nlpsol_data< double > d_nlp
Definition: nlpsol_impl.hpp:42
Options metadata for a class.
Definition: options.hpp:40
std::map< std::string, FStats > fstats