worhp_interface.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 
26 #include "worhp_interface.hpp"
27 
28 #include "casadi/core/casadi_misc.hpp"
29 #include <ctime>
30 #include <cstring>
31 
32 namespace casadi {
33 
34  extern "C"
35  int CASADI_NLPSOL_WORHP_EXPORT
36  casadi_register_nlpsol_worhp(Nlpsol::Plugin* plugin) {
37  plugin->creator = WorhpInterface::creator;
38  plugin->name = "worhp";
39  plugin->doc = WorhpInterface::meta_doc.c_str();
40  plugin->version = CASADI_VERSION;
41  plugin->options = &WorhpInterface::options_;
42  plugin->deserialize = &WorhpInterface::deserialize;
43  return 0;
44  }
45 
46  extern "C"
47  void CASADI_NLPSOL_WORHP_EXPORT casadi_load_nlpsol_worhp() {
49  }
50 
  /// Constructor: only forwards the problem definition to the Nlpsol
  /// base class; all actual setup happens later in init().
  WorhpInterface::WorhpInterface(const std::string& name, const Function& nlp)
    : Nlpsol(name, nlp) {
  }
54 
56  clear_mem();
57  }
58 
60  = {{&Nlpsol::options_},
61  {{"worhp",
62  {OT_DICT,
63  "Options to be passed to WORHP"}}
64  }
65  };
66 
67  void WorhpInterface::init(const Dict& opts) {
68 
69  // Call the init method of the base class
70  Nlpsol::init(opts);
71 
72  if (CheckWorhpVersion(WORHP_MAJOR, WORHP_MINOR, WORHP_PATCH)) {
73  casadi_warning("Worhp incompatibility. Interface was compiled for Worhp " +
74  str(WORHP_MAJOR) + "." + str(WORHP_MINOR) + "." + std::string(WORHP_PATCH));
75  }
76 
77  // Default options
78  Dict worhp_opts;
79 
80  // Read user options
81  for (auto&& op : opts) {
82  if (op.first=="worhp") {
83  worhp_opts = op.second;
84  }
85  }
86 
87  // Sort Worhp options
88  casadi_int nopts = WorhpGetParamCount();
89  for (auto&& op : worhp_opts) {
90  if (op.first=="qp") {
91  qp_opts_ = op.second;
92  continue;
93  }
94 
95  // Get corresponding index using a linear search
96  casadi_int ind;
97  for (ind=1; ind<=nopts; ++ind) {
98  // Get name in WORHP
99  const char* name = WorhpGetParamName(ind);
100  // Break if matching name
101  if (op.first==name) break;
102  }
103  if (ind>nopts) casadi_error("No such Worhp option: " + op.first);
104 
105  // Add to the corresponding list
106  switch (WorhpGetParamType(ind)) {
107  case WORHP_BOOL_T:
108  bool_opts_[op.first] = op.second;
109  break;
110  case WORHP_DOUBLE_T:
111  double_opts_[op.first] = op.second;
112  break;
113  case WORHP_INT_T:
114  int_opts_[op.first] = op.second;
115  break;
116  default:
117  casadi_error("Cannot handle WORHP option \"" + op.first + "\": Unknown type " +
118  str(WorhpGetParamType(ind)) + ".");
119  break;
120  }
121  }
122 
123  // Setup NLP functions
124  create_function("nlp_f", {"x", "p"}, {"f"});
125  create_function("nlp_g", {"x", "p"}, {"g"});
126  create_function("nlp_grad_f", {"x", "p"}, {"f", "grad:f:x"});
127  Function jac_g_fcn = create_function("nlp_jac_g", {"x", "p"}, {"g", "jac:g:x"});
128  Function hess_l_fcn = create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
129  {"transpose:triu:hess:gamma:x:x"},
130  {{"gamma", {"f", "g"}}});
131  jacg_sp_ = jac_g_fcn.sparsity_out(1);
132  hesslag_sp_ = hess_l_fcn.sparsity_out(0);
133 
134  // Temporary vectors
135  alloc_w(nx_); // for fetching diagonal entries form Hessian
136  }
137 
138  void worhp_disp(int mode, const char message[]) {
139  if (mode & WORHP_PRINT_MESSAGE) {
140  uout() << message << std::endl;
141  }
142  if (mode & WORHP_PRINT_WARNING) {
143  uerr() << message << std::endl;
144  }
145  if (mode & WORHP_PRINT_ERROR) {
146  uerr() << message << std::endl;
147  }
148  }
149 
  /// Initialize a memory block: set up WORHP's four data structures,
  /// load default parameters and apply all user-supplied parameters
  /// (typed bins from init() plus the "qp" sub-options, which WORHP
  /// exposes only as struct fields and must be set one by one).
  /// Returns 0 on success, 1 if base-class initialization fails.
  int WorhpInterface::init_mem(void* mem) const {
    if (Nlpsol::init_mem(mem)) return 1;
    auto m = static_cast<WorhpMemory*>(mem);

    // Route WORHP's printing through the CasADi streams
    SetWorhpPrint(&worhp_disp);

    // Pre-initialize the four WORHP structures; mark everything as not
    // yet initialized so later cleanup knows what to free
    WorhpPreInit(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
    m->worhp_o.initialised = false;
    m->worhp_w.initialised = false;
    m->worhp_p.initialised = false;
    m->worhp_c.initialised = false;

    // Initialize parameters to default values
    int status;
    InitParams(&status, &m->worhp_p);
    casadi_assert(status==0, "Problem in Worhp InitParams. Status: " + str(status));


    // Pass boolean parameters
    for (auto&& op : bool_opts_) {
      casadi_assert(
        WorhpSetBoolParam(&m->worhp_p, op.first.c_str(), op.second),
        "Problem setting boolean Worhp parameter " + op.first);
    }

    // Pass double parameters
    for (auto&& op : double_opts_) {
      casadi_assert(
        WorhpSetDoubleParam(&m->worhp_p, op.first.c_str(), op.second),
        "Problem setting double Worhp parameter " + op.first);
    }

    // Pass integer parameters
    for (auto&& op : int_opts_) {
      casadi_assert(
        WorhpSetIntParam(&m->worhp_p, op.first.c_str(), op.second),
        "Problem setting integer Worhp parameter " + op.first);
    }

    // Pass qp parameters: no generic setter exists for the qp sub-struct,
    // so each recognized name maps to its struct field explicitly
    for (auto&& op : qp_opts_) {
      if (op.first=="ipBarrier") {
        m->worhp_p.qp.ipBarrier = op.second;
      } else if (op.first=="ipComTol") {
        m->worhp_p.qp.ipComTol = op.second;
      } else if (op.first=="ipFracBound") {
        m->worhp_p.qp.ipFracBound = op.second;
      } else if (op.first=="ipMinAlpha") {
        m->worhp_p.qp.ipMinAlpha = op.second;
      } else if (op.first=="ipRelaxDiv") {
        m->worhp_p.qp.ipRelaxDiv = op.second;
      } else if (op.first=="ipRelaxMax") {
        m->worhp_p.qp.ipRelaxMax = op.second;
      } else if (op.first=="ipRelaxMin") {
        m->worhp_p.qp.ipRelaxMin = op.second;
      } else if (op.first=="ipRelaxMult") {
        m->worhp_p.qp.ipRelaxMult = op.second;
      } else if (op.first=="ipResTol") {
        m->worhp_p.qp.ipResTol = op.second;
      } else if (op.first=="lsTol") {
        m->worhp_p.qp.lsTol = op.second;
      } else if (op.first=="nsnBeta") {
        m->worhp_p.qp.nsnBeta = op.second;
      } else if (op.first=="nsnKKT") {
        m->worhp_p.qp.nsnKKT = op.second;
      } else if (op.first=="nsnMinAlpha") {
        m->worhp_p.qp.nsnMinAlpha = op.second;
      } else if (op.first=="nsnSigma") {
        m->worhp_p.qp.nsnSigma = op.second;
      } else if (op.first=="ipLsMethod") {
        m->worhp_p.qp.ipLsMethod = op.second;
      } else if (op.first=="lsItMaxIter") {
        m->worhp_p.qp.lsItMaxIter = op.second;
      } else if (op.first=="lsItMethod") {
        m->worhp_p.qp.lsItMethod = op.second;
      } else if (op.first=="lsItPrecondMethod") {
        m->worhp_p.qp.lsItPrecondMethod = op.second;
      } else if (op.first=="lsRefineMaxIter") {
        m->worhp_p.qp.lsRefineMaxIter = op.second;
      } else if (op.first=="maxIter") {
        m->worhp_p.qp.maxIter = op.second;
      } else if (op.first=="method") {
        m->worhp_p.qp.method = op.second;
      } else if (op.first=="nsnLsMethod") {
        m->worhp_p.qp.nsnLsMethod = op.second;
      } else if (op.first=="printLevel") {
        m->worhp_p.qp.printLevel = op.second;
      } else if (op.first=="ipTryRelax") {
        m->worhp_p.qp.ipTryRelax = op.second;
      } else if (op.first=="lsScale") {
        m->worhp_p.qp.lsScale = op.second;
      } else if (op.first=="lsTrySimple") {
        m->worhp_p.qp.lsTrySimple = op.second;
      } else if (op.first=="nsnGradStep") {
        m->worhp_p.qp.nsnGradStep = op.second;
      } else if (op.first=="scaleIntern") {
        m->worhp_p.qp.scaleIntern = op.second;
      } else if (op.first=="strict") {
        m->worhp_p.qp.strict = op.second;
      } else {
        casadi_error("No such Worhp option: qp." + op.first);
      }
    }

    // Mark the parameters as set; init_ stays false until WorhpInit has
    // actually been called (in set_work)
    m->worhp_p.initialised = true;
    m->init_ = false;

    return 0;
  }
260 
  /// Set up the (persistent) work vectors and (re)build WORHP's problem
  /// data structures: problem dimensions, nonzero counts, and the
  /// one-based CS sparsity structure for the gradient, the constraint
  /// Jacobian and the Lagrangian Hessian.
  void WorhpInterface::set_work(void* mem, const double**& arg, double**& res,
                                casadi_int*& iw, double*& w) const {
    auto m = static_cast<WorhpMemory*>(mem);

    // Set work in base classes
    Nlpsol::set_work(mem, arg, res, iw, w);

    // Free existing Worhp memory (except parameters)
    m->worhp_p.initialised = false; // Avoid freeing the memory for parameters
    if (m->worhp_o.initialised || m->worhp_w.initialised || m->worhp_c.initialised) {
      WorhpFree(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
    }
    m->worhp_p.initialised = true;

    // Number of (free) variables
    m->worhp_o.n = nx_;

    // Number of constraints
    m->worhp_o.m = ng_;

    // Mark the remaining structures for (re)initialization by WorhpInit
    m->worhp_c.initialised = false;
    m->worhp_w.initialised = false;
    m->worhp_o.initialised = false;

    // Worhp uses the CS format internally, hence it is the preferred sparse matrix format.
    m->worhp_w.DF.nnz = nx_;
    if (m->worhp_o.m>0) {
      m->worhp_w.DG.nnz = jacg_sp_.nnz(); // Jacobian of G
    } else {
      m->worhp_w.DG.nnz = 0;
    }

    // NOTE: NeedStructure cannot be queried before WorhpInit, hence
    // the nnz count is always set here (strictly-lower part + full diagonal)
    if (true /*m->worhp_w.HM.NeedStructure*/) { // not initialized // NOLINT
      m->worhp_w.HM.nnz = nx_ + hesslag_sp_.nnz_lower(true);
    } else {
      m->worhp_w.HM.nnz = 0;
    }

    /* Data structure initialisation. */
    WorhpInit(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
    m->init_ = true;
    if (m->worhp_c.status != FirstCall) {
      std::string msg = return_codes(m->worhp_c.status);
      casadi_error("Main: Initialisation failed. Status: " + msg);
    }

    // Objective gradient: dense, one entry per variable
    if (m->worhp_w.DF.NeedStructure) {
      for (casadi_int i=0; i<nx_; ++i) {
        m->worhp_w.DF.row[i] = i + 1; // Index-1 based
      }
    }

    // Constraint Jacobian structure, converted from CasADi's CCS
    if (m->worhp_o.m>0 && m->worhp_w.DG.NeedStructure) {
      casadi_int nz=0;
      const casadi_int* colind = jacg_sp_.colind();
      const casadi_int* row = jacg_sp_.row();
      for (casadi_int c=0; c<nx_; ++c) {
        for (casadi_int el=colind[c]; el<colind[c+1]; ++el) {
          casadi_int r = row[el];
          m->worhp_w.DG.col[nz] = c + 1; // Index-1 based
          m->worhp_w.DG.row[nz] = r + 1;
          nz++;
        }
      }
    }

    if (m->worhp_w.HM.NeedStructure) {
      // Get the sparsity pattern of the Hessian
      const casadi_int* colind = hesslag_sp_.colind();
      const casadi_int* row = hesslag_sp_.row();

      casadi_int nz=0;

      // Strictly lower triangular part of the Hessian (note CCS -> CRS format change)
      for (casadi_int c=0; c<nx_; ++c) {
        for (casadi_int el=colind[c]; el<colind[c+1]; ++el) {
          if (row[el]>c) {
            m->worhp_w.HM.row[nz] = row[el] + 1;
            m->worhp_w.HM.col[nz] = c + 1;
            nz++;
          }
        }
      }

      // Diagonal always included (WORHP requires it even if structurally zero)
      for (casadi_int r=0; r<nx_; ++r) {
        m->worhp_w.HM.row[nz] = r + 1;
        m->worhp_w.HM.col[nz] = r + 1;
        nz++;
      }
    }
  }
354 
355  int WorhpInterface::solve(void* mem) const {
356  auto m = static_cast<WorhpMemory*>(mem);
357  auto d_nlp = &m->d_nlp;
358 
359  for (casadi_int i=0; i<ng_; ++i) {
360  casadi_assert(!(d_nlp->lbz[nx_+i]==-inf && d_nlp->ubz[nx_+i] == inf),
361  "WorhpInterface::evaluate: Worhp cannot handle the case when both "
362  "LBG and UBG are infinite."
363  "You have that case at non-zero " + str(i)+ "."
364  "Reformulate your problem eliminating the corresponding constraint.");
365  }
366 
367  // Pass inputs to WORHP data structures
368  casadi_copy(d_nlp->z, nx_, m->worhp_o.X);
369  casadi_copy(d_nlp->lbz, nx_, m->worhp_o.XL);
370  casadi_copy(d_nlp->ubz, nx_, m->worhp_o.XU);
371  casadi_copy(d_nlp->lam, nx_, m->worhp_o.Lambda);
372  if (m->worhp_o.m>0) {
373  casadi_copy(d_nlp->lam+nx_, ng_, m->worhp_o.Mu);
374  casadi_copy(d_nlp->lbz+nx_, ng_, m->worhp_o.GL);
375  casadi_copy(d_nlp->ubz+nx_, ng_, m->worhp_o.GU);
376  }
377 
378  // Replace infinite bounds with m->worhp_p.Infty
379  double inf = std::numeric_limits<double>::infinity();
380  for (casadi_int i=0; i<nx_; ++i)
381  if (m->worhp_o.XL[i]==-inf) m->worhp_o.XL[i] = -m->worhp_p.Infty;
382  for (casadi_int i=0; i<nx_; ++i)
383  if (m->worhp_o.XU[i]== inf) m->worhp_o.XU[i] = m->worhp_p.Infty;
384  for (casadi_int i=0; i<ng_; ++i)
385  if (m->worhp_o.GL[i]==-inf) m->worhp_o.GL[i] = -m->worhp_p.Infty;
386  for (casadi_int i=0; i<ng_; ++i)
387  if (m->worhp_o.GU[i]== inf) m->worhp_o.GU[i] = m->worhp_p.Infty;
388 
389  if (verbose_) casadi_message("WorhpInterface::starting iteration");
390 
391  bool firstIteration = true;
392 
393  // Reverse Communication loop
394  while (m->worhp_c.status < TerminateSuccess && m->worhp_c.status > TerminateError) {
395  if (GetUserAction(&m->worhp_c, callWorhp)) {
396  Worhp(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
397  }
398 
399 
400  if (GetUserAction(&m->worhp_c, iterOutput)) {
401 
402  if (!firstIteration) {
403  firstIteration = true;
404 
405  if (!fcallback_.is_null()) {
406  m->iter = m->worhp_w.MajorIter;
407  m->iter_sqp = m->worhp_w.MinorIter;
408  m->inf_pr = m->worhp_w.NormMax_CV;
409  m->inf_du = m->worhp_p.ScaledKKT;
410  m->alpha_pr = m->worhp_w.ArmijoAlpha;
411 
412  // Inputs
413  std::fill_n(m->arg, fcallback_.n_in(), nullptr);
414  m->arg[NLPSOL_X] = m->worhp_o.X;
415  m->arg[NLPSOL_F] = &m->worhp_o.F;
416  m->arg[NLPSOL_G] = m->worhp_o.G;
417  m->arg[NLPSOL_LAM_P] = nullptr;
418  m->arg[NLPSOL_LAM_X] = m->worhp_o.Lambda;
419  m->arg[NLPSOL_LAM_G] = m->worhp_o.Mu;
420 
421  // Outputs
422  std::fill_n(m->res, fcallback_.n_out(), nullptr);
423  double ret_double;
424  m->res[0] = &ret_double;
425 
426  m->fstats.at("callback_fun").tic();
427  // Evaluate the callback function
428  fcallback_(m->arg, m->res, m->iw, m->w, 0);
429  m->fstats.at("callback_fun").toc();
430  casadi_int ret = static_cast<casadi_int>(ret_double);
431 
432  if (ret) m->worhp_c.status = TerminateError;
433  }
434  }
435 
436 
437  IterationOutput(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
438  DoneUserAction(&m->worhp_c, iterOutput);
439  }
440 
441  if (GetUserAction(&m->worhp_c, evalF)) {
442  m->arg[0] = m->worhp_o.X;
443  m->arg[1] = d_nlp->p;
444  m->res[0] = &m->worhp_o.F;
445  calc_function(m, "nlp_f");
446  d_nlp->objective = m->worhp_o.F; // Store cost, before scaling
447  m->worhp_o.F *= m->worhp_w.ScaleObj;
448  DoneUserAction(&m->worhp_c, evalF);
449  }
450 
451  if (GetUserAction(&m->worhp_c, evalG)) {
452  m->arg[0] = m->worhp_o.X;
453  m->arg[1] = d_nlp->p;
454  m->res[0] = m->worhp_o.G;
455  calc_function(m, "nlp_g");
456  DoneUserAction(&m->worhp_c, evalG);
457  }
458 
459  if (GetUserAction(&m->worhp_c, evalDF)) {
460  m->arg[0] = m->worhp_o.X;
461  m->arg[1] = d_nlp->p;
462  m->res[0] = nullptr;
463  m->res[1] = m->worhp_w.DF.val;
464  calc_function(m, "nlp_grad_f");
465  casadi_scal(nx_, m->worhp_w.ScaleObj, m->worhp_w.DF.val);
466  DoneUserAction(&m->worhp_c, evalDF);
467  }
468 
469  if (GetUserAction(&m->worhp_c, evalDG)) {
470  m->arg[0] = m->worhp_o.X;
471  m->arg[1] = d_nlp->p;
472  m->res[0] = nullptr;
473  m->res[1] = m->worhp_w.DG.val;
474  calc_function(m, "nlp_jac_g");
475  DoneUserAction(&m->worhp_c, evalDG);
476  }
477 
478  if (GetUserAction(&m->worhp_c, evalHM)) {
479  m->arg[0] = m->worhp_o.X;
480  m->arg[1] = d_nlp->p;
481  m->arg[2] = &m->worhp_w.ScaleObj;
482  m->arg[3] = m->worhp_o.Mu;
483  m->res[0] = m->worhp_w.HM.val;
484  calc_function(m, "nlp_hess_l");
485  // Diagonal values
486  double *dval = m->w;
487  casadi_clear(dval, nx_);
488 
489  // Remove diagonal
490  const casadi_int* colind = hesslag_sp_.colind();
491  const casadi_int* row = hesslag_sp_.row();
492  casadi_int ind=0;
493  for (casadi_int c=0; c<nx_; ++c) {
494  for (casadi_int el=colind[c]; el<colind[c+1]; ++el) {
495  if (row[el]==c) {
496  dval[c] = m->worhp_w.HM.val[el];
497  } else {
498  m->worhp_w.HM.val[ind++] = m->worhp_w.HM.val[el];
499  }
500  }
501  }
502 
503  // Add diagonal entries at the end
504  casadi_copy(dval, nx_, m->worhp_w.HM.val+ind);
505  DoneUserAction(&m->worhp_c, evalHM);
506  }
507 
508  if (GetUserAction(&m->worhp_c, fidif)) {
509  WorhpFidif(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
510  }
511  }
512 
513  // Copy outputs
514  casadi_copy(m->worhp_o.X, nx_, d_nlp->z);
515  casadi_copy(m->worhp_o.G, ng_, d_nlp->z+nx_);
516  casadi_copy(m->worhp_o.Lambda, nx_, d_nlp->lam);
517  casadi_copy(m->worhp_o.Mu, ng_, d_nlp->lam+nx_);
518 
519  StatusMsg(&m->worhp_o, &m->worhp_w, &m->worhp_p, &m->worhp_c);
520 
521  m->return_code = m->worhp_c.status;
522  m->return_status = return_codes(m->worhp_c.status);
523  m->success = m->return_code > TerminateSuccess;
524  if (m->return_code==MaxCalls || m->return_code==MaxCalls || m->return_code==Timeout)
525  m->unified_return_status = SOLVER_RET_LIMITED;
526  if (m->return_code==evalsNaN)
527  m->unified_return_status = SOLVER_RET_NAN;
528  return 0;
529  }
530 
  /// Translate a WORHP termination status into its symbolic name.
  /// Returns a static string; unknown codes yield a generic message.
  const char* WorhpInterface::return_codes(casadi_int flag) {
    switch (flag) {
      case OptimalSolution: return "OptimalSolution";
      case OptimalSolutionConstantF: return "OptimalSolutionConstantF";
      case SearchDirectionZero: return "SearchDirectionZero";
      case SearchDirectionSmall: return "SearchDirectionSmall";
      case FritzJohn: return "FritzJohn";
      case NotDiffable: return "NotDiffable";
      case Unbounded: return "Unbounded";
      case FeasibleSolution: return "FeasibleSolution";
      case LowPassFilterOptimal: return "LowPassFilterOptimal";
      case LowPassFilterAcceptable: return "LowPassFilterAcceptable";
      case AcceptableSolution: return "AcceptableSolution";
      case AcceptablePrevious: return "AcceptablePrevious";
      case AcceptableSolutionConstantF: return "AcceptableSolutionConstantF";
      case AcceptablePreviousConstantF: return "AcceptablePreviousConstantF";
      case AcceptableSolutionSKKT: return "AcceptableSolutionSKKT";
      case AcceptableSolutionScaled: return "AcceptableSolutionScaled";
      case AcceptablePreviousScaled: return "AcceptablePreviousScaled";
      case TerminateError: return "TerminateError";
      case MaxCalls: return "MaxCalls";
      case MaxIter: return "MaxIter";
      case Timeout: return "Timeout";
      case TooBig: return "TooBig";
      case evalsNaN: return "evalsNaN";
      case DivergingPrimal: return "DivergingPrimal";
      case DivergingDual: return "DivergingDual";
      case MinimumStepsize: return "MinimumStepsize";
      case RegularizationFailed: return "RegularizationFailed";
      case InitError: return "InitError";
      case DataError: return "DataError";
      case RestartError: return "RestartError";
      case QPerror: return "QPerror";
      case LinearSolverFailed: return "LinearSolverFailed";
      case TerminatedByCheckFD: return "TerminatedByCheckFD";
      case LicenseError: return "LicenseError";
      case Debug: return "Debug";
    }
    return "Unknown WORHP return code";
  }
571 
573  this->worhp_o.initialised = false;
574  this->worhp_w.initialised = false;
575  this->worhp_p.initialised = false;
576  this->worhp_c.initialised = false;
577  }
578 
580  if (this->init_) {
581  if (this->worhp_p.initialised || this->worhp_o.initialised ||
582  this->worhp_w.initialised || this->worhp_c.initialised) {
583  WorhpFree(&this->worhp_o, &this->worhp_w, &this->worhp_p, &this->worhp_c);
584  }
585  }
586  }
587 
588  Dict WorhpInterface::get_stats(void* mem) const {
589  Dict stats = Nlpsol::get_stats(mem);
590  auto m = static_cast<WorhpMemory*>(mem);
591  stats["return_status"] = m->return_status;
592  return stats;
593  }
594 
596  s.version("WorhpInterface", 1);
597  s.unpack("WorhpInterface::jacg_sp", jacg_sp_);
598  s.unpack("WorhpInterface::hesslag_sp", hesslag_sp_);
599  s.unpack("WorhpInterface::bool_opts", bool_opts_);
600  s.unpack("WorhpInterface::int_opts", int_opts_);
601  s.unpack("WorhpInterface::double_opts", double_opts_);
602  s.unpack("WorhpInterface::qp_opts", qp_opts_);
603  }
604 
607  s.version("WorhpInterface", 1);
608  s.pack("WorhpInterface::jacg_sp", jacg_sp_);
609  s.pack("WorhpInterface::hesslag_sp", hesslag_sp_);
610  s.pack("WorhpInterface::bool_opts", bool_opts_);
611  s.pack("WorhpInterface::int_opts", int_opts_);
612  s.pack("WorhpInterface::double_opts", double_opts_);
613  s.pack("WorhpInterface::qp_opts", qp_opts_);
614  }
615 
616 } // namespace casadi
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
Function object.
Definition: function.hpp:60
casadi_int n_out() const
Get the number of function outputs.
Definition: function.cpp:823
casadi_int n_in() const
Get the number of function inputs.
Definition: function.cpp:819
bool is_null() const
Is a null pointer?
NLP solver storage class.
Definition: nlpsol_impl.hpp:59
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1162
static const Options options_
Options.
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:420
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int init_mem(void *mem) const override
Initalize memory block.
Definition: nlpsol.cpp:603
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1306
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:795
Function fcallback_
callback function, executed at each iteration
Definition: nlpsol_impl.hpp:75
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
int calc_function(OracleMemory *m, const std::string &fcn, const double *const *arg=nullptr, int thread_id=0) const
static void registerPlugin(const Plugin &plugin, bool needs_lock=true)
Register an integrator in the factory.
bool verbose_
Verbose printout.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
casadi_int nnz_lower(bool strictly=false) const
Number of non-zeros in the lower triangular half,.
Definition: sparsity.cpp:352
casadi_int nnz() const
Get the number of (structural) non-zeros.
Definition: sparsity.cpp:148
const casadi_int * row() const
Get a reference to row-vector,.
Definition: sparsity.cpp:164
const casadi_int * colind() const
Get a reference to the colindex of all column element (see class description)
Definition: sparsity.cpp:168
std::map< std::string, casadi_int > int_opts_
int init_mem(void *mem) const override
Initalize memory block.
static const std::string meta_doc
A documentation string.
void init(const Dict &opts) override
Initialize.
static const Options options_
Options.
static const char * return_codes(casadi_int flag)
static Nlpsol * creator(const std::string &name, const Function &nlp)
Create a new NLP Solver.
std::map< std::string, double > double_opts_
WorhpInterface(const std::string &name, const Function &nlp)
std::map< std::string, bool > bool_opts_
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Dict get_stats(void *mem) const override
Get all statistics.
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
int solve(void *mem) const override
The casadi namespace.
Definition: archiver.cpp:28
@ NLPSOL_G
Constraints function at the optimal solution (ng x 1)
Definition: nlpsol.hpp:221
@ NLPSOL_X
Decision variables at the optimal solution (nx x 1)
Definition: nlpsol.hpp:217
@ NLPSOL_LAM_P
Lagrange multipliers for bounds on P at the solution (np x 1)
Definition: nlpsol.hpp:227
@ NLPSOL_F
Cost function value at the optimal solution (1 x 1)
Definition: nlpsol.hpp:219
@ NLPSOL_LAM_G
Lagrange multipliers for bounds on G at the solution (ng x 1)
Definition: nlpsol.hpp:225
@ NLPSOL_LAM_X
Lagrange multipliers for bounds on X at the solution (nx x 1)
Definition: nlpsol.hpp:223
std::ostream & uerr()
int CASADI_NLPSOL_WORHP_EXPORT casadi_register_nlpsol_worhp(Nlpsol::Plugin *plugin)
void casadi_copy(const T1 *x, casadi_int n, T1 *y)
COPY: y <-x.
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
void CASADI_NLPSOL_WORHP_EXPORT casadi_load_nlpsol_worhp()
const double inf
infinity
Definition: calculus.hpp:50
void casadi_scal(casadi_int n, T1 alpha, T1 *x)
SCAL: x <- alpha*x.
void casadi_clear(T1 *x, casadi_int n)
CLEAR: x <- 0.
void worhp_disp(int mode, const char message[])
std::ostream & uout()
@ SOLVER_RET_NAN
@ SOLVER_RET_LIMITED
casadi_nlpsol_data< double > d_nlp
Definition: nlpsol_impl.hpp:42
Options metadata for a class.
Definition: options.hpp:40
const char * return_status
WorhpMemory()
Constructor.