fatrop_interface.cpp
1 /*
2  * This file is part of CasADi.
3  *
4  * CasADi -- A symbolic framework for dynamic optimization.
5  * Copyright (C) 2010-2023 Joel Andersson, Joris Gillis, Moritz Diehl,
6  * KU Leuven. All rights reserved.
7  * Copyright (C) 2011-2014 Greg Horn
8  *
9  * CasADi is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 3 of the License, or (at your option) any later version.
13  *
14  * CasADi is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with CasADi; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  *
23  */
24 
25 
26 
27 #include "fatrop_interface.hpp"
28 #include "casadi/core/casadi_misc.hpp"
29 #include "../../core/global_options.hpp"
30 #include "../../core/casadi_interrupt.hpp"
31 #include "../../core/convexify.hpp"
32 #include "casadi/casadi_c.h"
33 
34 #include <ctime>
35 #include <stdlib.h>
36 #include <iostream>
37 #include <iomanip>
38 #include <chrono>
39 
40 #include <fatrop_runtime_str.h>
41 
42 namespace casadi {
43  extern "C"
44  int CASADI_NLPSOL_FATROP_EXPORT
45  casadi_register_nlpsol_fatrop(Nlpsol::Plugin* plugin) {
46  plugin->creator = FatropInterface::creator;
47  plugin->name = "fatrop";
48  plugin->doc = FatropInterface::meta_doc.c_str();
49  plugin->version = CASADI_VERSION;
50  plugin->options = &FatropInterface::options_;
51  plugin->deserialize = &FatropInterface::deserialize;
52  return 0;
53  }
54 
55  extern "C"
56  void CASADI_NLPSOL_FATROP_EXPORT casadi_load_nlpsol_fatrop() {
58  }
59 
60  FatropInterface::FatropInterface(const std::string& name, const Function& nlp)
61  : Nlpsol(name, nlp) {
62  }
63 
65  clear_mem();
66  }
67 
68  Sparsity FatropInterface::blocksparsity(casadi_int rows, casadi_int cols,
69  const std::vector<casadi_ocp_block>& blocks, bool eye) {
70  DM r(rows, cols);
71  for (auto && b : blocks) {
72  if (eye) {
73  r(range(b.offset_r, b.offset_r+b.rows),
74  range(b.offset_c, b.offset_c+b.cols)) = DM::eye(b.rows);
75  casadi_assert_dev(b.rows==b.cols);
76  } else {
77  r(range(b.offset_r, b.offset_r+b.rows),
78  range(b.offset_c, b.offset_c+b.cols)) = DM::zeros(b.rows, b.cols);
79  }
80  }
81  return r.sparsity();
82  }
83 
84  void report_issue(casadi_int i, const std::string& msg) {
85  casadi_int idx = i+GlobalOptions::start_index;
86  casadi_warning("Structure detection error on row " + str(idx) + ". " + msg);
87  }
88 
89  const Options FatropInterface::options_
90  = {{&Nlpsol::options_},
91  {{"N",
92  {OT_INT,
93  "OCP horizon"}},
94  {"nx",
95  {OT_INTVECTOR,
96  "Number of states, length N+1"}},
97  {"nu",
98  {OT_INTVECTOR,
99  "Number of controls, length N+1"}},
100  {"ng",
101  {OT_INTVECTOR,
102  "Number of non-dynamic constraints, length N+1"}},
103  {"fatrop",
104  {OT_DICT,
105  "Options to be passed to fatrop"}},
106  {"structure_detection",
107  {OT_STRING,
108  "NONE | auto | manual"}},
109  {"convexify_strategy",
110  {OT_STRING,
111  "NONE|regularize|eigen-reflect|eigen-clip. "
112  "Strategy to convexify the Lagrange Hessian before passing it to the solver."}},
113  {"convexify_margin",
114  {OT_DOUBLE,
115  "When using a convexification strategy, make sure that "
116  "the smallest eigenvalue is at least this (default: 1e-7)."}},
117  {"debug",
118  {OT_BOOL,
119  "Produce debug information (default: false)"}},
120  {"fatrop",
121  {OT_DICT,
122  "Options to be passed to fatrop"
123  }}
124  }
125  };
126 
127  void FatropInterface::init(const Dict& opts) {
128  // Call the init method of the base class
129  Nlpsol::init(opts);
130 
131  casadi_int struct_cnt=0;
132 
133  // Default options
134  std::string convexify_strategy = "none";
135  double convexify_margin = 1e-7;
136  casadi_int max_iter_eig = 200;
137  structure_detection_ = STRUCTURE_NONE;
138  debug_ = false;
139 
140  calc_g_ = true;
141  calc_f_ = true;
142 
143  // Read options
144  for (auto&& op : opts) {
145  if (op.first=="N") {
146  N_ = op.second;
147  struct_cnt++;
148  } else if (op.first=="nx") {
149  nxs_ = op.second;
150  struct_cnt++;
151  } else if (op.first=="nu") {
152  nus_ = op.second;
153  struct_cnt++;
154  } else if (op.first=="ng") {
155  ngs_ = op.second;
156  struct_cnt++;
157  } else if (op.first=="convexify_strategy") {
158  convexify_strategy = op.second.to_string();
159  } else if (op.first=="convexify_margin") {
160  convexify_margin = op.second;
161  } else if (op.first=="max_iter_eig") {
162  max_iter_eig = op.second;
163  } else if (op.first=="fatrop") {
164  opts_ = op.second;
165  } else if (op.first=="structure_detection") {
166  std::string v = op.second;
167  if (v=="auto") {
168  structure_detection_ = STRUCTURE_AUTO;
169  } else if (v=="manual") {
170  structure_detection_ = STRUCTURE_MANUAL;
171  } else if (v=="none") {
172  structure_detection_ = STRUCTURE_NONE;
173  } else {
174  casadi_error("Unknown option for structure_detection: '" + v + "'.");
175  }
176  } else if (op.first=="debug") {
177  debug_ = op.second;
178  }
179  }
180 
181  // Do we need second order derivatives?
182  exact_hessian_ = true;
183  auto hessian_approximation = opts_.find("hessian_approximation");
184  if (hessian_approximation!=opts_.end()) {
185  exact_hessian_ = hessian_approximation->second == "exact";
186  }
187 
188  // Setup NLP functions
189  create_function("nlp_f", {"x", "p"}, {"f"});
190  create_function("nlp_g", {"x", "p"}, {"g"});
191  if (!has_function("nlp_grad_f")) {
192  create_function("nlp_grad_f", {"x", "p"}, {"grad:f:x"});
193  }
194  if (!has_function("nlp_jac_g")) {
195  create_function("nlp_jac_g", {"x", "p"}, {"g", "jac:g:x"});
196  }
197  jacg_sp_ = get_function("nlp_jac_g").sparsity_out(1);
198 
199  convexify_ = false;
200 
201  // Allocate temporary work vectors
202  if (exact_hessian_) {
203  if (!has_function("nlp_hess_l")) {
204  create_function("nlp_hess_l", {"x", "p", "lam:f", "lam:g"},
205  {"grad:gamma:x", "hess:gamma:x:x"}, {{"gamma", {"f", "g"}}});
206  }
207  hesslag_sp_ = get_function("nlp_hess_l").sparsity_out(1);
208  casadi_assert(hesslag_sp_.is_symmetric(), "Hessian must be symmetric");
209  if (convexify_strategy!="none") {
210  convexify_ = true;
211  Dict opts;
212  opts["strategy"] = convexify_strategy;
213  opts["margin"] = convexify_margin;
214  opts["max_iter_eig"] = max_iter_eig;
215  opts["verbose"] = verbose_;
217  }
218  }
219 
220  const std::vector<casadi_int>& nx = nxs_;
221  const std::vector<casadi_int>& ng = ngs_;
222  const std::vector<casadi_int>& nu = nus_;
223 
224  Sparsity lamg_csp_, lam_ulsp_, lam_uusp_, lam_xlsp_, lam_xusp_, lam_clsp_;
225 
226  const Sparsity& A_ = jacg_sp_;
227  if (debug_) {
228  A_.to_file("debug_fatrop_actual.mtx");
229  }
230  // Keep list of erroring rows
231  std::set<casadi_int> errors;
232 
233  casadi_int na_ = A_.size1(); //TODO(jgillis): replace with ng
234 
235  if (struct_cnt>0) {
236  casadi_assert(structure_detection_ == STRUCTURE_MANUAL,
237  "You must set structure_detection to 'manual' if you set N, nx, nu, ng.");
238  }
239 
240  if (structure_detection_==STRUCTURE_MANUAL) {
241  casadi_assert(struct_cnt==4,
242  "You must set all of N, nx, nu, ng.");
243  } else if (structure_detection_==STRUCTURE_NONE) {
244  N_ = 0;
245  nxs_ = {0};
246  nus_ = {nx_};
247  ngs_ = {ng_};
248  } else if (structure_detection_==STRUCTURE_AUTO) {
249  casadi_assert(!equality_.empty(),
250  "Structure detection auto requires the 'equality' option to be set");
251  /* General strategy: look for the xk+1 diagonal part in A
252  */
253 
254  // Find the right-most column for each row in A -> A_skyline
255  // Find the second-to-right-most column -> A_skyline2
256  // Find the left-most column -> A_bottomline
257  Sparsity AT = A_.T();
258  std::vector<casadi_int> A_skyline;
259  std::vector<casadi_int> A_skyline2;
260  std::vector<casadi_int> A_bottomline;
261 
262  std::vector<casadi_int> AT_colind = AT.get_colind();
263  std::vector<casadi_int> AT_row = AT.get_row();
264  for (casadi_int i=0;i<AT.size2();++i) {
265  casadi_int pivot = AT_colind.at(i+1);
266  if (pivot>AT_colind.at(i)) {
267  A_bottomline.push_back(AT_row.at(AT_colind.at(i)));
268  } else {
269  A_bottomline.push_back(-1);
270  }
271  if (pivot>AT_colind.at(i)) {
272  A_skyline.push_back(AT_row.at(pivot-1));
273  if (pivot>AT_colind.at(i)+1) {
274  A_skyline2.push_back(AT_row.at(pivot-2));
275  } else {
276  A_skyline2.push_back(-1);
277  }
278  } else {
279  A_skyline.push_back(-1);
280  A_skyline2.push_back(-1);
281  }
282  }
283 
284  casadi_assert(equality_[0],
285  "Constraint Jcobian must start with gap-closing constraint "
286  "(tagged 'true' in equality vector).");
287 
288  casadi_int pivot = A_skyline[0]; // Current right-most element
289  casadi_int start_pivot = pivot; // First right-most element that started the stage
290  casadi_int prev_start_pivot = 0;
291 
292  bool walking = true;
293 
294  nxs_.push_back(1);
295  nus_.push_back(0);
296  ngs_.push_back(0);
297  for (casadi_int i=1;i<na_;++i) { // Loop over all rows
298  bool is_gap_closing = true;
299  if (A_bottomline[i]!=-1 && A_bottomline[i]<prev_start_pivot) {
300  errors.insert(i);
301  report_issue(i, "Constraint found depending on a state of the previous interval.");
302  }
303  if (equality_[i]) {
304  // A candidate for a gap-closing constraint must tagged as equality
305  if (A_skyline[i]>pivot+1) { // Jump to a diagonal in the future
306  if (A_bottomline[i]!=-1 && A_bottomline[i]<start_pivot) {
307  errors.insert(i);
308  report_issue(i, "Constraint found depending on a state of the previous interval.");
309  }
310  //if (A_bottomline[i]<start_pivot || A_bottomline[i]>pivot) {
311  // errors.insert(i);
312  // report_issue(i, "Gap-closing constraint must depend on a state.");
313  //}
314  nxs_.push_back(1);
315  nus_.push_back(A_skyline[i]-pivot-1); // Size of jump equals number of states
316  ngs_.push_back(0);
317  prev_start_pivot = start_pivot;
318  start_pivot = A_skyline[i];
319  pivot = A_skyline[i];
320  walking = true;
321  } else if (A_skyline[i]==pivot+1) { // Walking the diagonal
322  if (A_skyline2[i]<start_pivot) { // Free of below-diagonal entries?
323  if (A_bottomline[i]>=prev_start_pivot) { // We must depend on at least one state
324  pivot++;
325  nxs_.back()++;
326  walking = true;
327  } else {
328  if (A_bottomline[i]!=-1 && A_bottomline[i]<start_pivot) {
329  errors.insert(i);
330  report_issue(i, "Constraint found depending "
331  "on a state of the previous interval.");
332  }
333  is_gap_closing = false;
334  }
335  } else {
336  nxs_.push_back(1);
337  nus_.push_back(0);
338  ngs_.push_back(0);
339  if (A_bottomline[i]!=-1 && A_bottomline[i]<start_pivot) {
340  errors.insert(i);
341  report_issue(i, "Gap-closing constraint found depending "
342  "on a state of the previous interval.");
343  }
344  prev_start_pivot = start_pivot;
345  start_pivot = A_skyline[i];
346  pivot = A_skyline[i];
347  walking = true;
348  }
349  } else {
350  is_gap_closing = false;
351  }
352  } else {
353  is_gap_closing = false;
354  }
355 
356  if (!is_gap_closing) {
357  if (walking) {
358  if (A_skyline[i]>=start_pivot) {
359  nxs_.push_back(0);
360  nus_.push_back(0);
361  ngs_.push_back(0);
362  walking = false;
363  }
364  }
365  ngs_.back()++; // non-gap-closing constraint detected
366  }
367 
368 
369  }
370 
371  if (nxs_.back()!=0) {
372  nxs_.push_back(0);
373  nus_.push_back(0);
374  ngs_.push_back(0);
375  }
376 
377  // Set nx0==nx1 unless not allowed
378  nxs_.insert(nxs_.begin(), std::min(A_skyline[0], nxs_.front()));
379 
380  // Patch loose ends
381  nus_.front() += std::max(A_skyline[0]-nxs_.front(), static_cast<casadi_int>(0));
382  nus_.back() += nx_-sum(nu)-sum(nx);
383 
384  casadi_assert_dev(nxs_.back()==0);
385  nxs_.pop_back();
386 
387  casadi_assert_dev(nx.size()==nu.size());
388  casadi_assert_dev(nx.size()==ng.size());
389 
390  casadi_assert_dev(sum(ng)+sum(nx)==na_+nx.front());
391  casadi_assert_dev(sum(nx)+sum(nu)==nx_);
392 
393  N_ = nxs_.size()-1;
394  }
395 
396  casadi_assert(nx.size()==N_+1, "nx must have length N+1.");
397  casadi_assert(nu.size()==N_+1, "nu must have length N+1.");
398  casadi_assert(ng.size()==N_+1, "ng must have length N+1.");
399 
400  if (verbose_) {
401  casadi_message("Using structure: N " + str(N_) + ", nx " + str(nx) + ", "
402  "nu " + str(nu) + ", ng " + str(ng) + ".");
403  }
404 
 405  // For debugging purposes
406  std::vector< casadi_ocp_block > A_blocks, B_blocks, C_blocks, D_blocks;
407 
408  /* Disassemble A input into:
409  A B I
410  C D
411  A B I
412  C D
413  C D
414  */
415  casadi_int offset_r = 0, offset_c = 0;
416  for (casadi_int k=0;k<N_;++k) { // Loop over blocks
417  AB_blocks_.push_back({offset_r, offset_c, nx[k+1], nx[k]+nu[k]});
418  CD_blocks_.push_back({offset_r+nx[k+1], offset_c, ng[k], nx[k]+nu[k]});
419  A_blocks.push_back({offset_r, offset_c, nx[k+1], nx[k]});
420  B_blocks.push_back({offset_r, offset_c+nx[k], nx[k+1], nu[k]});
421  C_blocks.push_back({offset_r+nx[k+1], offset_c, ng[k], nx[k]});
422  D_blocks.push_back({offset_r+nx[k+1], offset_c+nx[k], ng[k], nu[k]});
423  offset_c+= nx[k]+nu[k];
424  if (k+1<N_)
425  I_blocks_.push_back({offset_r, offset_c, nx[k+1], nx[k+1]});
426  // TODO(jgillis) actually use these
 427  // test5.py versus test6.py
428  // test5 changes behaviour when piping stdout to file -> memory corruption
429  // logs are ever so slightly different
430  else
431  I_blocks_.push_back({offset_r, offset_c, nx[k+1], nx[k+1]});
432  offset_r+= nx[k+1]+ng[k];
433  }
434  CD_blocks_.push_back({offset_r, offset_c, ng[N_], nx[N_]+nu[N_]});
435  C_blocks.push_back({offset_r, offset_c, ng[N_], nx[N_]});
436  D_blocks.push_back({offset_r, offset_c+nx[N_], ng[N_], nu[N_]});
437 
438  casadi_int offset = 0;
439  AB_offsets_.push_back(0);
440  for (auto e : AB_blocks_) {
441  offset += e.rows*e.cols;
442  AB_offsets_.push_back(offset);
443  }
444  offset = 0;
445  CD_offsets_.push_back(0);
446  for (auto e : CD_blocks_) {
447  offset += e.rows*e.cols;
448  CD_offsets_.push_back(offset);
449  }
450 
451  ABsp_ = blocksparsity(na_, nx_, AB_blocks_);
452  CDsp_ = blocksparsity(na_, nx_, CD_blocks_);
453  Isp_ = blocksparsity(na_, nx_, I_blocks_, true);
454 
455  Sparsity total = ABsp_ + CDsp_ + Isp_;
456 
457  if (debug_) {
458  total.to_file("debug_fatrop_expected.mtx");
459  blocksparsity(na_, nx_, A_blocks).to_file("debug_fatrop_A.mtx");
460  blocksparsity(na_, nx_, B_blocks).to_file("debug_fatrop_B.mtx");
461  blocksparsity(na_, nx_, C_blocks).to_file("debug_fatrop_C.mtx");
462  blocksparsity(na_, nx_, D_blocks).to_file("debug_fatrop_D.mtx");
463  Isp_.to_file("debug_fatrop_I.mtx");
464  std::vector<casadi_int> errors_vec(errors.begin(), errors.end());
465  std::vector<casadi_int> colind = {0, static_cast<casadi_int>(errors_vec.size())};
466  Sparsity(na_, 1, colind, errors_vec).to_file("debug_fatrop_errors.mtx");
467  }
468 
469  casadi_assert(errors.empty() && (A_ + total).nnz() == total.nnz(),
470  "HPIPM: specified structure of A does not correspond to what the interface can handle. "
471  "Structure is: N " + str(N_) + ", nx " + str(nx) + ", nu " + str(nu) + ", "
472  "ng " + str(ng) + ".\n"
473  "Note that debug_fatrop_expected.mtx and debug_fatrop_actual.mtx are written "
474  "to the current directory when 'debug' option is true.\n"
475  "These can be read with Sparsity.from_file(...)."
476  "For a ready-to-use script, "
477  "see https://gist.github.com/jgillis/dec56fa16c90a8e4a69465e8422c5459");
478  casadi_assert_dev(total.nnz() == ABsp_.nnz() + CDsp_.nnz() + Isp_.nnz());
479 
480  /* Disassemble H input into:
481  Q S'
482  S R
483  Q S'
484  S R
485 
486  Multiply by 2
487  */
488  offset = 0;
489  for (casadi_int k=0;k<N_+1;++k) { // Loop over blocks
490  RSQ_blocks_.push_back({offset, offset, nx[k]+nu[k], nx[k]+nu[k]});
491  offset+= nx[k]+nu[k];
492  }
493  RSQsp_ = blocksparsity(nx_, nx_, RSQ_blocks_);
494 
495  offset = 0;
496  RSQ_offsets_.push_back(0);
497  for (auto e : RSQ_blocks_) {
498  offset += e.rows*e.cols;
499  RSQ_offsets_.push_back(offset);
500  }
501 
502  set_fatrop_prob();
503 
504  // Allocate memory
505  casadi_int sz_arg, sz_res, sz_w, sz_iw;
506  casadi_fatrop_work(&p_, &sz_arg, &sz_res, &sz_iw, &sz_w);
507 
508  alloc_arg(sz_arg, true);
509  alloc_res(sz_res, true);
510  alloc_iw(sz_iw, true);
511  alloc_w(sz_w, true);
512  }
513 
514  int FatropInterface::init_mem(void* mem) const {
515  if (Nlpsol::init_mem(mem)) return 1;
516  if (!mem) return 1;
517  auto m = static_cast<FatropMemory*>(mem);
518  fatrop_init_mem(&m->d);
519 
520  return 0;
521  }
522 
523  void FatropInterface::free_mem(void* mem) const {
524  auto m = static_cast<FatropMemory*>(mem);
525  fatrop_free_mem(&m->d);
526  delete static_cast<FatropMemory*>(mem);
527  }
528 
530  void FatropInterface::set_work(void* mem, const double**& arg, double**& res,
531  casadi_int*& iw, double*& w) const {
532  auto m = static_cast<FatropMemory*>(mem);
533 
534  // Set work in base classes
535  Nlpsol::set_work(mem, arg, res, iw, w);
536 
537  m->d.prob = &p_;
538  m->d.nlp = &m->d_nlp;
539 
540  casadi_fatrop_init(&m->d, &arg, &res, &iw, &w);
541 
542  m->d.nlp->oracle->m = static_cast<void*>(m);
543 
544  // options
545  }
546 
547  int FatropInterface::solve(void* mem) const {
548  auto m = static_cast<FatropMemory*>(mem);
549 
550  casadi_fatrop_presolve(&m->d);
551 
552  for (const auto& kv : opts_) {
553  switch (fatrop_ocp_c_option_type(kv.first.c_str())) {
554  case 0:
555  fatrop_ocp_c_set_option_double(m->d.solver, kv.first.c_str(), kv.second);
556  break;
557  case 1:
558  fatrop_ocp_c_set_option_int(m->d.solver, kv.first.c_str(), kv.second.to_int());
559  break;
560  case 2:
561  fatrop_ocp_c_set_option_bool(m->d.solver, kv.first.c_str(), kv.second.to_bool());
562  break;
563  case 3:
564  {
565  std::string s = kv.second.to_string();
566  fatrop_ocp_c_set_option_string(m->d.solver, kv.first.c_str(), s.c_str());
567  }
568  break;
569  case -1:
570  casadi_error("Fatrop option not supported: " + kv.first);
571  default:
572  casadi_error("Unknown option type.");
573  }
574  }
575 
576  casadi_fatrop_solve(&m->d);
577 
578  m->success = m->d.success;
579  m->unified_return_status = static_cast<UnifiedReturnStatus>(m->d.unified_return_status);
580 
581  return 0;
582  }
583 
584  Dict FatropInterface::get_stats(void* mem) const {
585  Dict stats = Nlpsol::get_stats(mem);
586  auto m = static_cast<FatropMemory*>(mem);
587  Dict fatrop;
588  fatrop["compute_sd_time"] = m->d.stats.compute_sd_time;
589  fatrop["duinf_time"] = m->d.stats.duinf_time;
590  fatrop["eval_hess_time"] = m->d.stats.eval_hess_time;
591  fatrop["eval_jac_time"] = m->d.stats.eval_jac_time;
592  fatrop["eval_cv_time"] = m->d.stats.eval_cv_time;
593  fatrop["eval_grad_time"] = m->d.stats.eval_grad_time;
594  fatrop["eval_obj_time"] = m->d.stats.eval_obj_time;
595  fatrop["initialization_time"] = m->d.stats.initialization_time;
596  fatrop["time_total"] = m->d.stats.time_total;
597  fatrop["eval_hess_count"] = m->d.stats.eval_hess_count;
598  fatrop["eval_jac_count"] = m->d.stats.eval_jac_count;
599  fatrop["eval_cv_count"] = m->d.stats.eval_cv_count;
600  fatrop["eval_grad_count"] = m->d.stats.eval_grad_count;
601  fatrop["eval_obj_count"] = m->d.stats.eval_obj_count;
602  fatrop["iterations_count"] = m->d.stats.iterations_count;
603  fatrop["return_flag"] = m->d.stats.return_flag;
604  stats["fatrop"] = fatrop;
605  stats["iter_count"] = m->d.stats.iterations_count;
606  stats["nx"] = nxs_;
607  stats["nu"] = nus_;
608  stats["ng"] = ngs_;
609  stats["N"] = N_;
610  stats["return_status"] = m->d.return_status;
611  return stats;
612  }
613 
615  g << "fatrop_init_mem(&" + codegen_mem(g) + ");\n";
616  g << "return 0;\n";
617  }
618 
620  g << "fatrop_free_mem(&" + codegen_mem(g) + ");\n";
621  }
622 
// NOTE(review): original source lines 622-638 — including the
// 'codegen_declarations(CodeGenerator& g)' signature — are missing from
// this listing; the statements below are only the visible tail of that
// function. Reconcile with the repository before editing.
// Register every NLP oracle function as a dependency of the generated code.
 639  g.add_dependency(get_function("nlp_f"));
 640  g.add_dependency(get_function("nlp_grad_f"));
 641  g.add_dependency(get_function("nlp_g"));
 642  g.add_dependency(get_function("nlp_jac_g"));
 643  if (exact_hessian_) {
// Hessian oracle exists only when an exact Hessian is requested (see init())
 644  g.add_dependency(get_function("nlp_hess_l"));
 645  }
 646  g.add_include("fatrop/ocp/OCPCInterface.h");
 647 
// Emit a print callback that forwards fatrop output to CASADI_PRINTF
 648  std::string name = "fatrop_cb_write";
 649  std::string f = g.shorthand(name);
 650 
 651  g << "void " << f
 652  << "(const char* msg, int num) {\n";
 653  g.flush(g.body);
 654  g.scope_enter();
 655  g << "CASADI_PRINTF(\"%.*s\", num, msg);\n";
 656  g.scope_exit();
 657  g << "}\n";
 658 
// Emit a no-op flush callback (fatrop requires one)
 659  name = "fatrop_cb_flush";
 660  f = g.shorthand(name);
 661 
 662  g << "void " << f
 663  << "(void) {\n";
 664  g.flush(g.body);
 665  g.scope_enter();
 666  g.scope_exit();
 667  g << "}\n";
 668 }
669 
672  g.auxiliaries << g.sanitize_source(fatrop_runtime_str, {"casadi_real"});
673 
674  g.local("d", "struct casadi_fatrop_data*");
675  g.init_local("d", "&" + codegen_mem(g));
676  g.local("p", "struct casadi_fatrop_prob");
677  set_fatrop_prob(g);
678 
679  g << "casadi_fatrop_init(d, &arg, &res, &iw, &w);\n";
680  g << "casadi_oracle_init(d->nlp->oracle, &arg, &res, &iw, &w);\n";
681  g << "casadi_fatrop_presolve(d);\n";
682 
683  for (const auto& kv : opts_) {
684  switch (fatrop_ocp_c_option_type(kv.first.c_str())) {
685  case 0:
686  g << "fatrop_ocp_c_set_option_double(d->solver, \"" + kv.first + "\", "
687  + str(kv.second) + ");\n";
688  break;
689  case 1:
690  g << "fatrop_ocp_c_set_option_int(d->solver, \"" + kv.first + "\", "
691  + str(kv.second.to_int()) + ");\n";
692  break;
693  case 2:
694  g << "fatrop_ocp_c_set_option_bool(d->solver, \"" + kv.first + "\", "
695  + str(static_cast<int>(kv.second.to_bool())) + ");\n";
696  break;
697  case 3:
698  {
699  std::string s = kv.second.to_string();
700  g << "fatrop_ocp_c_set_option_bool(d->solver, \"" + kv.first + "\", \""
701  + s + "\");\n";
702  }
703  break;
704  case -1:
705  casadi_error("Fatrop option not supported: " + kv.first);
706  default:
707  casadi_error("Unknown option type.");
708  }
709  }
710 
711  // Options
712  g << "casadi_fatrop_solve(d);\n";
713 
715 
716  if (error_on_fail_) {
717  g << "return d->unified_return_status;\n";
718  } else {
719  g << "return 0;\n";
720  }
721 }
722 
723 std::vector<casadi_int> fatrop_blocks_pack(const std::vector<casadi_ocp_block>& blocks) {
724  size_t N = blocks.size();
725  std::vector<casadi_int> ret(4*N+1);
726  casadi_int* r = get_ptr(ret);
727  *r++ = N;
728  for (casadi_int i=0;i<N;++i) {
729  *r++ = blocks[i].offset_r;
730  *r++ = blocks[i].offset_c;
731  *r++ = blocks[i].rows;
732  *r++ = blocks[i].cols;
733  }
734  return ret;
735 }
736 
737 
738 
740  p_.nlp = &p_nlp_;
741  p_.nx = get_ptr(nxs_);
742  p_.nu = get_ptr(nus_);
743  p_.ABsp = ABsp_;
744  p_.AB_offsets = get_ptr(AB_offsets_);
745  p_.CDsp = CDsp_;
746  p_.CD_offsets = get_ptr(CD_offsets_);
747  p_.RSQsp = RSQsp_;
748  p_.RSQ_offsets = get_ptr(RSQ_offsets_);
749  p_.Isp = Isp_;
750  p_.I_offsets = get_ptr(I_offsets_);
751 
752  p_.AB = get_ptr(AB_blocks_);
753  p_.CD = get_ptr(CD_blocks_);
754  p_.RSQ = get_ptr(RSQ_blocks_);
755  p_.I = get_ptr(I_blocks_);
756  p_.N = N_;
757 
758  p_.sp_a = jacg_sp_;
759  p_.sp_h = hesslag_sp_;
760 
761  p_.nlp_hess_l = OracleCallback("nlp_hess_l", this);
762  p_.nlp_jac_g = OracleCallback("nlp_jac_g", this);
763  p_.nlp_grad_f = OracleCallback("nlp_grad_f", this);
764  p_.nlp_f = OracleCallback("nlp_f", this);
765  p_.nlp_g = OracleCallback("nlp_g", this);
766  p_.write = &casadi_c_logger_write;
767  p_.flush = &casadi_c_logger_flush;
768 
769  casadi_fatrop_setup(&p_);
770 }
771 
772  void codegen_unpack_block(CodeGenerator& g, const std::string& name,
773  const std::vector<casadi_ocp_block>& blocks) {
774  casadi_int sz = blocks.size();
775  if (sz==0) sz++;
776  std::string n = "block_" + name + "[" + str(sz) + "]";
777  g.local(n, "static struct casadi_ocp_block");
778  g << "p." << name << " = block_" + name + ";\n";
779  g << "casadi_unpack_ocp_blocks(" << "p." << name
780  << ", " << g.constant(fatrop_blocks_pack(blocks)) << ");\n";
781  }
782 
783  void unpack_block(const std::vector<casadi_int>& p, std::vector<casadi_ocp_block>& blocks) {
784  const casadi_int* packed = get_ptr(p);
785  casadi_int N = *packed++;
786  blocks.resize(N);
787  for (casadi_int i=0;i<N;++i) {
788  blocks[i].offset_r = *packed++;
789  blocks[i].offset_c = *packed++;
790  blocks[i].rows = *packed++;
791  blocks[i].cols = *packed++;
792  }
793  }
794 
796  if (jacg_sp_.size1()>0 && jacg_sp_.nnz()==0) {
797  casadi_error("Empty sparsity pattern not supported in FATROP C interface");
798  }
799  g << "d->nlp = &d_nlp;\n";
800  g << "d->prob = &p;\n";
801  g << "p.nlp = &p_nlp;\n";
802 
803  g << "p.nx = " << g.constant(nxs_) << ";\n";
804  g << "p.nu = " << g.constant(nus_) << ";\n";
805  g << "p.ABsp = " << g.sparsity(ABsp_) << ";\n";
806  g << "p.AB_offsets = " << g.constant(AB_offsets_) << ";\n";
807  g << "p.CDsp = " << g.sparsity(CDsp_) << ";\n";
808  g << "p.CD_offsets = " << g.constant(CD_offsets_) << ";\n";
809  g << "p.RSQsp = " << g.sparsity(RSQsp_) << ";\n";
810  g << "p.RSQ_offsets = " << g.constant(RSQ_offsets_) << ";\n";
811  g << "p.Isp = " << g.sparsity(Isp_) << ";\n";
812  g << "p.I_offsets = " << g.constant(I_offsets_) << ";\n";
813 
814  codegen_unpack_block(g, "AB", AB_blocks_);
815  codegen_unpack_block(g, "CD", CD_blocks_);
816  codegen_unpack_block(g, "RSQ", RSQ_blocks_);
817  codegen_unpack_block(g, "I", I_blocks_);
818  g << "p.N = " << N_ << ";\n";
819 
820  g.setup_callback("p.nlp_jac_g", get_function("nlp_jac_g"));
821  g.setup_callback("p.nlp_grad_f", get_function("nlp_grad_f"));
822  g.setup_callback("p.nlp_f", get_function("nlp_f"));
823  g.setup_callback("p.nlp_g", get_function("nlp_g"));
824  g.setup_callback("p.nlp_hess_l", get_function("nlp_hess_l"));
825 
826  g << "p.sp_a = " << g.sparsity(jacg_sp_) << ";\n";
827  if (exact_hessian_) {
828  g << "p.sp_h = " << g.sparsity(hesslag_sp_) << ";\n";
829  } else {
830  g << "p.sp_h = 0;\n";
831  }
832 
833  g << "p.write = &" << g.shorthand("fatrop_cb_write") << ";\n";
834  g << "p.flush = &" << g.shorthand("fatrop_cb_flush") << ";\n";
835 
836  g << "casadi_fatrop_setup(&p);\n";
837 
838 }
839 
841  s.version("FatropInterface", 1);
842  s.unpack("FatropInterface::jacg_sp", jacg_sp_);
843  s.unpack("FatropInterface::hesslag_sp", hesslag_sp_);
844  s.unpack("FatropInterface::exact_hessian", exact_hessian_);
845  s.unpack("FatropInterface::opts", opts_);
846  s.unpack("FatropInterface::convexify", convexify_);
847 
848 
849  s.unpack("FatropInterface::Isp", Isp_);
850  s.unpack("FatropInterface::ABsp", ABsp_);
851  s.unpack("FatropInterface::CDsp", CDsp_);
852  s.unpack("FatropInterface::RSQsp", RSQsp_);
853 
854  std::vector<casadi_int> AB_blocks;
855  s.unpack("FatropInterface::AB_blocks", AB_blocks);
856  unpack_block(AB_blocks, AB_blocks_);
857  std::vector<casadi_int> CD_blocks;
858  s.unpack("FatropInterface::CD_blocks", CD_blocks);
859  unpack_block(CD_blocks, CD_blocks_);
860  std::vector<casadi_int> RSQ_blocks;
861  s.unpack("FatropInterface::RSQ_blocks", RSQ_blocks);
862  unpack_block(RSQ_blocks, RSQ_blocks_);
863  std::vector<casadi_int> I_blocks;
864  s.unpack("FatropInterface::I_blocks", I_blocks);
865  unpack_block(I_blocks, I_blocks_);
866 
867  s.unpack("FatropInterface::nxs", nxs_);
868  s.unpack("FatropInterface::nus", nus_);
869  s.unpack("FatropInterface::ngs", ngs_);
870  s.unpack("FatropInterface::N", N_);
871 
872  casadi_int structure_detection;
873  s.unpack("FatropInterface::structure_detection", structure_detection);
874  structure_detection_ = static_cast<StructureDetection>(structure_detection);
875 
876 
877  s.unpack("FatropInterface::AB_offsets", AB_offsets_);
878  s.unpack("FatropInterface::CD_offsets", CD_offsets_);
879  s.unpack("FatropInterface::RSQ_offsets", RSQ_offsets_);
880  s.unpack("FatropInterface::I_offsets", I_offsets_);
881  s.unpack("FatropInterface::debug", debug_);
882 
883  set_fatrop_prob();
884 }
885 
888  s.version("FatropInterface", 1);
889 
890  s.pack("FatropInterface::jacg_sp", jacg_sp_);
891  s.pack("FatropInterface::hesslag_sp", hesslag_sp_);
892  s.pack("FatropInterface::exact_hessian", exact_hessian_);
893  s.pack("FatropInterface::opts", opts_);
894  s.pack("FatropInterface::convexify", convexify_);
895 
896  s.pack("FatropInterface::Isp", Isp_);
897  s.pack("FatropInterface::ABsp", ABsp_);
898  s.pack("FatropInterface::CDsp", CDsp_);
899  s.pack("FatropInterface::RSQsp", RSQsp_);
900 
901  s.pack("FatropInterface::AB_blocks", fatrop_blocks_pack(AB_blocks_));
902  s.pack("FatropInterface::CD_blocks", fatrop_blocks_pack(CD_blocks_));
903  s.pack("FatropInterface::RSQ_blocks", fatrop_blocks_pack(RSQ_blocks_));
904  s.pack("FatropInterface::I_blocks", fatrop_blocks_pack(I_blocks_));
905 
906  s.pack("FatropInterface::nxs", nxs_);
907  s.pack("FatropInterface::nus", nus_);
908  s.pack("FatropInterface::ngs", ngs_);
909  s.pack("FatropInterface::N", N_);
910  s.pack("FatropInterface::structure_detection", static_cast<casadi_int>(structure_detection_));
911  s.pack("FatropInterface::AB_offsets", AB_offsets_);
912  s.pack("FatropInterface::CD_offsets", CD_offsets_);
913  s.pack("FatropInterface::RSQ_offsets", RSQ_offsets_);
914  s.pack("FatropInterface::I_offsets", I_offsets_);
915  s.pack("FatropInterface::debug", debug_);
916 }
917 
918 } // namespace casadi
Helper class for C code generation.
std::string add_dependency(const Function &f)
Add a function dependency.
void scope_enter()
Enter a local scope.
std::string constant(const std::vector< casadi_int > &v)
Represent an array constant; adding it when new.
void flush(std::ostream &s)
Flush the buffer to a stream of choice.
void local(const std::string &name, const std::string &type, const std::string &ref="")
Declare a local variable.
void setup_callback(const std::string &s, const Function &f)
Setup a callback.
void scope_exit()
Exit a local scope.
void init_local(const std::string &name, const std::string &def)
Specify the default value for a local variable.
std::string sanitize_source(const std::string &src, const std::vector< std::string > &inst, bool add_shorthand=true)
Sanitize source files for codegen.
void add_include(const std::string &new_include, bool relative_path=false, const std::string &use_ifdef=std::string())
Add an include file optionally using a relative path "..." instead of an absolute path <....
std::string shorthand(const std::string &name) const
Get a shorthand.
std::stringstream body
std::string sparsity(const Sparsity &sp, bool canonical=true)
std::stringstream auxiliaries
void add_auxiliary(Auxiliary f, const std::vector< std::string > &inst={"casadi_real"})
Add a built-in auxiliary function.
static Sparsity setup(ConvexifyData &d, const Sparsity &H, const Dict &opts=Dict(), bool inplace=true)
Definition: convexify.cpp:166
Helper class for Serialization.
void unpack(Sparsity &e)
Reconstruct an object from the input stream.
void version(const std::string &name, int v)
void codegen_init_mem(CodeGenerator &g) const override
Codegen alloc_mem.
void codegen_body(CodeGenerator &g) const override
Generate code for the function body.
void free_mem(void *mem) const override
Free memory block.
void init(const Dict &opts) override
Initialize.
Dict get_stats(void *mem) const override
Get all statistics.
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
int init_mem(void *mem) const override
Initialize memory block.
Dict opts_
All FATROP options.
static ProtoFunction * deserialize(DeserializingStream &s)
Deserialize into MX.
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
ConvexifyData convexify_data_
Data for convexification.
FatropInterface(const std::string &name, const Function &nlp)
void codegen_free_mem(CodeGenerator &g) const override
Codegen free_mem.
static const Options options_
Options.
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
static const std::string meta_doc
A documentation string.
bool exact_hessian_
Exact Hessian?
int solve(void *mem) const override
static Nlpsol * creator(const std::string &name, const Function &nlp)
Create a new NLP Solver.
void alloc_iw(size_t sz_iw, bool persistent=false)
Ensure required length of iw field.
void alloc_res(size_t sz_res, bool persistent=false)
Ensure required length of res field.
void alloc_arg(size_t sz_arg, bool persistent=false)
Ensure required length of arg field.
std::string codegen_mem(CodeGenerator &g, const std::string &index="mem") const
Get thread-local memory object.
size_t sz_res() const
Get required length of res field.
size_t sz_w() const
Get required length of w field.
void alloc_w(size_t sz_w, bool persistent=false)
Ensure required length of w field.
size_t sz_arg() const
Get required length of arg field.
size_t sz_iw() const
Get required length of iw field.
Function object.
Definition: function.hpp:60
static MatType zeros(casadi_int nrow=1, casadi_int ncol=1)
Create a dense matrix or a matrix with specified sparsity with all entries zero.
static casadi_int start_index
static Matrix< double > eye(casadi_int n)
create an n-by-n identity matrix
NLP solver storage class.
Definition: nlpsol_impl.hpp:59
void codegen_body_exit(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1269
Dict get_stats(void *mem) const override
Get all statistics.
Definition: nlpsol.cpp:1162
static const Options options_
Options.
void codegen_body_enter(CodeGenerator &g) const override
Generate code for the function body.
Definition: nlpsol.cpp:1179
void codegen_declarations(CodeGenerator &g) const override
Generate code for the declarations of the C function.
Definition: nlpsol.cpp:1250
void init(const Dict &opts) override
Initialize.
Definition: nlpsol.cpp:420
casadi_int ng_
Number of constraints.
Definition: nlpsol_impl.hpp:69
int init_mem(void *mem) const override
Initalize memory block.
Definition: nlpsol.cpp:603
std::vector< bool > equality_
Options.
casadi_nlpsol_prob< double > p_nlp_
Definition: nlpsol_impl.hpp:63
void serialize_body(SerializingStream &s) const override
Serialize an object without type information.
Definition: nlpsol.cpp:1306
bool calc_f_
Options.
Definition: nlpsol_impl.hpp:97
bool calc_g_
Options.
Definition: nlpsol_impl.hpp:97
casadi_int nx_
Number of variables.
Definition: nlpsol_impl.hpp:66
void set_work(void *mem, const double **&arg, double **&res, casadi_int *&iw, double *&w) const override
Set the (persistent) work vectors.
Definition: nlpsol.cpp:795
Function create_function(const Function &oracle, const std::string &fname, const std::vector< std::string > &s_in, const std::vector< std::string > &s_out, const Function::AuxOut &aux=Function::AuxOut(), const Dict &opts=Dict())
std::vector< std::string > get_function() const override
Get list of dependency functions.
bool has_function(const std::string &fname) const override
static void registerPlugin(const Plugin &plugin, bool needs_lock=true)
Register an integrator in the factory.
bool error_on_fail_
Throw an exception on failure?
bool verbose_
Verbose printout.
void clear_mem()
Clear all memory (called from destructor)
Helper class for Serialization.
void version(const std::string &name, int v)
void pack(const Sparsity &e)
Serializes an object to the output stream.
General sparsity class.
Definition: sparsity.hpp:106
casadi_int size1() const
Get the number of rows.
Definition: sparsity.cpp:124
Sparsity T() const
Transpose the matrix.
Definition: sparsity.cpp:394
casadi_int nnz() const
Get the number of (structural) non-zeros.
Definition: sparsity.cpp:148
casadi_int size2() const
Get the number of columns.
Definition: sparsity.cpp:128
std::vector< casadi_int > get_colind() const
Get the column index for each column.
Definition: sparsity.cpp:364
void to_file(const std::string &filename, const std::string &format_hint="") const
Definition: sparsity.cpp:1906
std::vector< casadi_int > get_row() const
Get the row for each non-zero entry.
Definition: sparsity.cpp:372
bool is_symmetric() const
Is symmetric?
Definition: sparsity.cpp:317
The casadi namespace.
Definition: archiver.cpp:28
void unpack_block(const std::vector< casadi_int > &p, std::vector< casadi_ocp_block > &blocks)
std::vector< casadi_int > range(casadi_int start, casadi_int stop, casadi_int step, casadi_int len)
Range function.
std::vector< casadi_int > fatrop_blocks_pack(const std::vector< casadi_ocp_block > &blocks)
void CASADI_NLPSOL_FATROP_EXPORT casadi_load_nlpsol_fatrop()
@ OT_INTVECTOR
std::string str(const T &v)
String representation, any type.
GenericType::Dict Dict
C++ equivalent of Python's dict or MATLAB's struct.
int CASADI_NLPSOL_FATROP_EXPORT casadi_register_nlpsol_fatrop(Nlpsol::Plugin *plugin)
T * get_ptr(std::vector< T > &v)
Get a pointer to the data contained in the vector.
T sum(const std::vector< T > &values)
sum
void report_issue(casadi_int i, const std::string &msg)
void codegen_unpack_block(CodeGenerator &g, const std::string &name, const std::vector< casadi_ocp_block > &blocks)
UnifiedReturnStatus
casadi_fatrop_data< double > d
struct FatropOcpCStats stats
OracleCallback nlp_grad_f
const casadi_int * I_offsets
const casadi_int * sp_h
const casadi_int * nx
const casadi_int * RSQsp
OracleCallback nlp_hess_l
FatropOcpCWrite write
const casadi_int * nu
casadi_ocp_block * I
const casadi_int * sp_a
FatropOcpCFlush flush
casadi_ocp_block * AB
const casadi_int * CDsp
OracleCallback nlp_g
casadi_ocp_block * RSQ
const casadi_int * RSQ_offsets
casadi_ocp_block * CD
const casadi_int * CD_offsets
const casadi_int * Isp
const casadi_nlpsol_prob< T1 > * nlp
OracleCallback nlp_f
OracleCallback nlp_jac_g
const casadi_int * ABsp
const casadi_int * AB_offsets