Sleipnir C++ API
Loading...
Searching...
No Matches
problem.hpp
// Copyright (c) Sleipnir contributors

#pragma once

#include <algorithm>
#include <array>
#include <cmath>
#include <concepts>
#include <functional>
#include <iterator>
#include <memory>
#include <optional>
#include <ranges>
#include <utility>

#include <Eigen/Core>
#include <Eigen/SparseCore>
#include <gch/small_vector.hpp>

#include "sleipnir/autodiff/expression_type.hpp"
#include "sleipnir/autodiff/gradient.hpp"
#include "sleipnir/autodiff/hessian.hpp"
#include "sleipnir/autodiff/jacobian.hpp"
#include "sleipnir/autodiff/variable.hpp"
#include "sleipnir/autodiff/variable_matrix.hpp"
#include "sleipnir/optimization/solver/exit_status.hpp"
#include "sleipnir/optimization/solver/interior_point.hpp"
#include "sleipnir/optimization/solver/iteration_info.hpp"
#include "sleipnir/optimization/solver/newton.hpp"
#include "sleipnir/optimization/solver/options.hpp"
#include "sleipnir/optimization/solver/sqp.hpp"
#include "sleipnir/optimization/solver/util/bounds.hpp"
#include "sleipnir/util/empty.hpp"
#include "sleipnir/util/print.hpp"
#include "sleipnir/util/print_diagnostics.hpp"
#include "sleipnir/util/profiler.hpp"
#include "sleipnir/util/spy.hpp"
#include "sleipnir/util/symbol_exports.hpp"

40namespace slp {
41
65template <typename Scalar>
66class Problem {
67 public:
70
76 [[nodiscard]]
78 m_decision_variables.emplace_back();
79 return m_decision_variables.back();
80 }
81
89 [[nodiscard]]
90 VariableMatrix<Scalar> decision_variable(int rows, int cols = 1) {
91 m_decision_variables.reserve(m_decision_variables.size() + rows * cols);
92
93 VariableMatrix<Scalar> vars{detail::empty, rows, cols};
94
95 for (int row = 0; row < rows; ++row) {
96 for (int col = 0; col < cols; ++col) {
97 m_decision_variables.emplace_back();
98 vars[row, col] = m_decision_variables.back();
99 }
100 }
101
102 return vars;
103 }
104
116 [[nodiscard]]
118 // We only need to store the lower triangle of an n x n symmetric matrix;
119 // the other elements are duplicates. The lower triangle has (n² + n)/2
120 // elements.
121 //
122 // n
123 // Σ k = (n² + n)/2
124 // k=1
125 m_decision_variables.reserve(m_decision_variables.size() +
126 (rows * rows + rows) / 2);
127
128 VariableMatrix<Scalar> vars{detail::empty, rows, rows};
129
130 for (int row = 0; row < rows; ++row) {
131 for (int col = 0; col <= row; ++col) {
132 m_decision_variables.emplace_back();
133 vars[row, col] = m_decision_variables.back();
134 vars[col, row] = m_decision_variables.back();
135 }
136 }
137
138 return vars;
139 }
140
150 void minimize(const Variable<Scalar>& cost) { m_f = cost; }
151
161 void minimize(Variable<Scalar>&& cost) { m_f = std::move(cost); }
162
173 // Maximizing a cost function is the same as minimizing its negative
174 m_f = -objective;
175 }
176
187 // Maximizing a cost function is the same as minimizing its negative
188 m_f = -std::move(objective);
189 }
190
196 m_equality_constraints.reserve(m_equality_constraints.size() +
197 constraint.constraints.size());
198 std::ranges::copy(constraint.constraints,
199 std::back_inserter(m_equality_constraints));
200 }
201
207 m_equality_constraints.reserve(m_equality_constraints.size() +
208 constraint.constraints.size());
209 std::ranges::copy(constraint.constraints,
210 std::back_inserter(m_equality_constraints));
211 }
212
218 m_inequality_constraints.reserve(m_inequality_constraints.size() +
219 constraint.constraints.size());
220 std::ranges::copy(constraint.constraints,
221 std::back_inserter(m_inequality_constraints));
222 }
223
229 m_inequality_constraints.reserve(m_inequality_constraints.size() +
230 constraint.constraints.size());
231 std::ranges::copy(constraint.constraints,
232 std::back_inserter(m_inequality_constraints));
233 }
234
238 ExpressionType cost_function_type() const {
239 if (m_f) {
240 return m_f.value().type();
241 } else {
242 return ExpressionType::NONE;
243 }
244 }
245
249 ExpressionType equality_constraint_type() const {
250 if (!m_equality_constraints.empty()) {
251 return std::ranges::max(m_equality_constraints, {},
253 .type();
254 } else {
255 return ExpressionType::NONE;
256 }
257 }
258
262 ExpressionType inequality_constraint_type() const {
263 if (!m_inequality_constraints.empty()) {
264 return std::ranges::max(m_inequality_constraints, {},
266 .type();
267 } else {
268 return ExpressionType::NONE;
269 }
270 }
271
280 ExitStatus solve(const Options& options = Options{},
281 [[maybe_unused]] bool spy = false) {
282 using DenseVector = Eigen::Vector<Scalar, Eigen::Dynamic>;
283 using SparseMatrix = Eigen::SparseMatrix<Scalar>;
284 using SparseVector = Eigen::SparseVector<Scalar>;
285
286 // Create the initial value column vector
287 DenseVector x{m_decision_variables.size()};
288 for (size_t i = 0; i < m_decision_variables.size(); ++i) {
289 x[i] = m_decision_variables[i].value();
290 }
291
292 if (options.diagnostics) {
293 print_exit_conditions(options);
294 print_problem_analysis();
295 }
296
297 // Get the highest order constraint expression types
298 auto f_type = cost_function_type();
299 auto c_e_type = equality_constraint_type();
300 auto c_i_type = inequality_constraint_type();
301
302 // If the problem is empty or constant, there's nothing to do
303 if (f_type <= ExpressionType::CONSTANT &&
304 c_e_type <= ExpressionType::CONSTANT &&
305 c_i_type <= ExpressionType::CONSTANT) {
306#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
307 if (options.diagnostics) {
308 slp::println("\nInvoking no-op solver\n");
309 }
310#endif
311 return ExitStatus::SUCCESS;
312 }
313
314 VariableMatrix<Scalar> x_ad{m_decision_variables};
315
316 // Set up cost function
317 Variable f = m_f.value_or(Scalar(0));
318
319 int num_decision_variables = m_decision_variables.size();
320 int num_equality_constraints = m_equality_constraints.size();
321 int num_inequality_constraints = m_inequality_constraints.size();
322
323 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
324 iteration_callbacks;
325 for (const auto& callback : m_iteration_callbacks) {
326 iteration_callbacks.emplace_back(callback);
327 }
328 for (const auto& callback : m_persistent_iteration_callbacks) {
329 iteration_callbacks.emplace_back(callback);
330 }
331
332 // Solve the optimization problem
333 ExitStatus status;
334 if (m_equality_constraints.empty() && m_inequality_constraints.empty()) {
335 if (options.diagnostics) {
336 slp::println("\nInvoking Newton solver\n");
337 }
338
339 gch::small_vector<SetupProfiler> ad_setup_profilers;
340 ad_setup_profilers.emplace_back("setup");
341 ad_setup_profilers.emplace_back("↳ ∇f(x)");
342 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
343
344 ad_setup_profilers[0].start();
345
346 // Set up gradient autodiff
347 ad_setup_profilers[1].start();
348 Gradient g{f, x_ad};
349 ad_setup_profilers[1].stop();
350
351 // Set up Lagrangian Hessian autodiff
352 ad_setup_profilers[2].start();
353 Hessian<Scalar, Eigen::Lower> H{f, x_ad};
354 ad_setup_profilers[2].stop();
355
356 ad_setup_profilers[0].stop();
357
358 if (options.diagnostics) {
359 print_setup_diagnostics(ad_setup_profilers);
360 }
361
362#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
363 // Sparsity pattern files written when spy flag is set
364 std::unique_ptr<Spy<Scalar>> H_spy;
365
366 if (spy) {
367 H_spy = std::make_unique<Spy<Scalar>>(
368 "H.spy", "Hessian", "Decision variables", "Decision variables",
369 num_decision_variables, num_decision_variables);
370 iteration_callbacks.push_back(
371 [&](const IterationInfo<Scalar>& info) -> bool {
372 H_spy->add(info.H);
373 return false;
374 });
375 }
376#endif
377
378 NewtonMatrixCallbacks<Scalar> matrix_callbacks{
379 num_decision_variables,
380 [&](const DenseVector& x) -> Scalar {
381 x_ad.set_value(x);
382 return f.value();
383 },
384 [&](const DenseVector& x) -> SparseVector {
385 x_ad.set_value(x);
386 return g.value();
387 },
388 [&](const DenseVector& x) -> SparseMatrix {
389 x_ad.set_value(x);
390 return H.value();
391 }};
392
393 // Invoke Newton solver
394 status =
395 newton<Scalar>(matrix_callbacks, iteration_callbacks, options, x);
396 } else if (m_inequality_constraints.empty()) {
397 if (options.diagnostics) {
398 slp::println("\nInvoking SQP solver\n");
399 }
400
401 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
402 VariableMatrix<Scalar> y_ad(num_equality_constraints);
403
404 gch::small_vector<SetupProfiler> ad_setup_profilers;
405 ad_setup_profilers.emplace_back("setup");
406 ad_setup_profilers.emplace_back("↳ ∇f(x)");
407 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
408 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_f");
409 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_c");
410 ad_setup_profilers.emplace_back("↳ ∂cₑ/∂x");
411
412 ad_setup_profilers[0].start();
413
414 // Set up gradient autodiff
415 ad_setup_profilers[1].start();
416 Gradient g{f, x_ad};
417 ad_setup_profilers[1].stop();
418
419 ad_setup_profilers[2].start();
420
421 // Set up cost part of Lagrangian Hessian autodiff
422 ad_setup_profilers[3].start();
423 Hessian<Scalar, Eigen::Lower> H_f{f, x_ad};
424 ad_setup_profilers[3].stop();
425
426 // Set up constraint part of Lagrangian Hessian autodiff
427 ad_setup_profilers[4].start();
428 Hessian<Scalar, Eigen::Lower> H_c{-y_ad.T() * c_e_ad, x_ad};
429 ad_setup_profilers[4].stop();
430
431 ad_setup_profilers[2].stop();
432
433 // Set up equality constraint Jacobian autodiff
434 ad_setup_profilers[5].start();
435 Jacobian A_e{c_e_ad, x_ad};
436 ad_setup_profilers[5].stop();
437
438 ad_setup_profilers[0].stop();
439
440 if (options.diagnostics) {
441 print_setup_diagnostics(ad_setup_profilers);
442 }
443
444#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
445 // Sparsity pattern files written when spy flag is set
446 std::unique_ptr<Spy<Scalar>> H_spy;
447 std::unique_ptr<Spy<Scalar>> A_e_spy;
448
449 if (spy) {
450 H_spy = std::make_unique<Spy<Scalar>>(
451 "H.spy", "Hessian", "Decision variables", "Decision variables",
452 num_decision_variables, num_decision_variables);
453 A_e_spy = std::make_unique<Spy<Scalar>>(
454 "A_e.spy", "Equality constraint Jacobian", "Constraints",
455 "Decision variables", num_equality_constraints,
456 num_decision_variables);
457 iteration_callbacks.push_back(
458 [&](const IterationInfo<Scalar>& info) -> bool {
459 H_spy->add(info.H);
460 A_e_spy->add(info.A_e);
461 return false;
462 });
463 }
464#endif
465
466 SQPMatrixCallbacks<Scalar> matrix_callbacks{
467 num_decision_variables,
468 num_equality_constraints,
469 [&](const DenseVector& x) -> Scalar {
470 x_ad.set_value(x);
471 return f.value();
472 },
473 [&](const DenseVector& x) -> SparseVector {
474 x_ad.set_value(x);
475 return g.value();
476 },
477 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
478 x_ad.set_value(x);
479 y_ad.set_value(y);
480 return H_f.value() + H_c.value();
481 },
482 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
483 x_ad.set_value(x);
484 y_ad.set_value(y);
485 return H_c.value();
486 },
487 [&](const DenseVector& x) -> DenseVector {
488 x_ad.set_value(x);
489 return c_e_ad.value();
490 },
491 [&](const DenseVector& x) -> SparseMatrix {
492 x_ad.set_value(x);
493 return A_e.value();
494 }};
495
496 // Invoke SQP solver
497 status = sqp<Scalar>(matrix_callbacks, iteration_callbacks, options, x);
498 } else {
499 if (options.diagnostics) {
500 slp::println("\nInvoking IPM solver\n");
501 }
502
503 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
504 VariableMatrix<Scalar> c_i_ad{m_inequality_constraints};
505 VariableMatrix<Scalar> y_ad(num_equality_constraints);
506 VariableMatrix<Scalar> z_ad(num_inequality_constraints);
507
508 gch::small_vector<SetupProfiler> ad_setup_profilers;
509 ad_setup_profilers.emplace_back("setup");
510 ad_setup_profilers.emplace_back("↳ ∇f(x)");
511 ad_setup_profilers.emplace_back("↳ ∇²ₓₓL");
512 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_f");
513 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL_c");
514 ad_setup_profilers.emplace_back("↳ ∂cₑ/∂x");
515 ad_setup_profilers.emplace_back("↳ ∂cᵢ/∂x");
516
517 ad_setup_profilers[0].start();
518
519 // Set up gradient autodiff
520 ad_setup_profilers[1].start();
521 Gradient g{f, x_ad};
522 ad_setup_profilers[1].stop();
523
524 ad_setup_profilers[2].start();
525
526 // Set up cost part of Lagrangian Hessian autodiff
527 ad_setup_profilers[3].start();
528 Hessian<Scalar, Eigen::Lower> H_f{f, x_ad};
529 ad_setup_profilers[3].stop();
530
531 // Set up constraint part of Lagrangian Hessian autodiff
532 ad_setup_profilers[4].start();
533 Hessian<Scalar, Eigen::Lower> H_c{-y_ad.T() * c_e_ad - z_ad.T() * c_i_ad,
534 x_ad};
535 ad_setup_profilers[4].stop();
536
537 ad_setup_profilers[2].stop();
538
539 // Set up equality constraint Jacobian autodiff
540 ad_setup_profilers[5].start();
541 Jacobian A_e{c_e_ad, x_ad};
542 ad_setup_profilers[5].stop();
543
544 // Set up inequality constraint Jacobian autodiff
545 ad_setup_profilers[6].start();
546 Jacobian A_i{c_i_ad, x_ad};
547 ad_setup_profilers[6].stop();
548
549 ad_setup_profilers[0].stop();
550
551 if (options.diagnostics) {
552 print_setup_diagnostics(ad_setup_profilers);
553 }
554
555#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
556 // Sparsity pattern files written when spy flag is set
557 std::unique_ptr<Spy<Scalar>> H_spy;
558 std::unique_ptr<Spy<Scalar>> A_e_spy;
559 std::unique_ptr<Spy<Scalar>> A_i_spy;
560
561 if (spy) {
562 H_spy = std::make_unique<Spy<Scalar>>(
563 "H.spy", "Hessian", "Decision variables", "Decision variables",
564 num_decision_variables, num_decision_variables);
565 A_e_spy = std::make_unique<Spy<Scalar>>(
566 "A_e.spy", "Equality constraint Jacobian", "Constraints",
567 "Decision variables", num_equality_constraints,
568 num_decision_variables);
569 A_i_spy = std::make_unique<Spy<Scalar>>(
570 "A_i.spy", "Inequality constraint Jacobian", "Constraints",
571 "Decision variables", num_inequality_constraints,
572 num_decision_variables);
573 iteration_callbacks.push_back(
574 [&](const IterationInfo<Scalar>& info) -> bool {
575 H_spy->add(info.H);
576 A_e_spy->add(info.A_e);
577 A_i_spy->add(info.A_i);
578 return false;
579 });
580 }
581#endif
582
583 const auto [bound_constraint_mask, bounds, conflicting_bound_indices] =
584 get_bounds<Scalar>(m_decision_variables, m_inequality_constraints,
585 A_i.value());
586 if (!conflicting_bound_indices.empty()) {
587 if (options.diagnostics) {
588 print_bound_constraint_global_infeasibility_error(
589 conflicting_bound_indices);
590 }
591 return ExitStatus::GLOBALLY_INFEASIBLE;
592 }
593
594#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
595 project_onto_bounds(x, bounds);
596#endif
597
598 InteriorPointMatrixCallbacks<Scalar> matrix_callbacks{
599 num_decision_variables,
600 num_equality_constraints,
601 num_inequality_constraints,
602 [&](const DenseVector& x) -> Scalar {
603 x_ad.set_value(x);
604 return f.value();
605 },
606 [&](const DenseVector& x) -> SparseVector {
607 x_ad.set_value(x);
608 return g.value();
609 },
610 [&](const DenseVector& x, const DenseVector& y,
611 const DenseVector& z) -> SparseMatrix {
612 x_ad.set_value(x);
613 y_ad.set_value(y);
614 z_ad.set_value(z);
615 return H_f.value() + H_c.value();
616 },
617 [&](const DenseVector& x, const DenseVector& y,
618 const DenseVector& z) -> SparseMatrix {
619 x_ad.set_value(x);
620 y_ad.set_value(y);
621 z_ad.set_value(z);
622 return H_c.value();
623 },
624 [&](const DenseVector& x) -> DenseVector {
625 x_ad.set_value(x);
626 return c_e_ad.value();
627 },
628 [&](const DenseVector& x) -> SparseMatrix {
629 x_ad.set_value(x);
630 return A_e.value();
631 },
632 [&](const DenseVector& x) -> DenseVector {
633 x_ad.set_value(x);
634 return c_i_ad.value();
635 },
636 [&](const DenseVector& x) -> SparseMatrix {
637 x_ad.set_value(x);
638 return A_i.value();
639 }};
640
641 // Invoke interior-point method solver
642 status =
643 interior_point<Scalar>(matrix_callbacks, iteration_callbacks, options,
644#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
645 bound_constraint_mask,
646#endif
647 x);
648 }
649
650 if (options.diagnostics) {
651 slp::println("\nExit: {}", status);
652 }
653
654 // Assign the solution to the original Variable instances
655 VariableMatrix<Scalar>{m_decision_variables}.set_value(x);
656
657 return status;
658 }
659
665 template <typename F>
666 requires requires(F callback, const IterationInfo<Scalar>& info) {
667 { callback(info) } -> std::same_as<void>;
668 }
670 m_iteration_callbacks.emplace_back(
671 [=, callback =
672 std::forward<F>(callback)](const IterationInfo<Scalar>& info) {
673 callback(info);
674 return false;
675 });
676 }
677
684 template <typename F>
685 requires requires(F callback, const IterationInfo<Scalar>& info) {
686 { callback(info) } -> std::same_as<bool>;
687 }
689 m_iteration_callbacks.emplace_back(std::forward<F>(callback));
690 }
691
693 void clear_callbacks() { m_iteration_callbacks.clear(); }
694
703 template <typename F>
704 requires requires(F callback, const IterationInfo<Scalar>& info) {
705 { callback(info) } -> std::same_as<bool>;
706 }
708 m_persistent_iteration_callbacks.emplace_back(std::forward<F>(callback));
709 }
710
711 private:
712 // The list of decision variables, which are the root of the problem's
713 // expression tree
714 gch::small_vector<Variable<Scalar>> m_decision_variables;
715
716 // The cost function: f(x)
717 std::optional<Variable<Scalar>> m_f;
718
719 // The list of equality constraints: cₑ(x) = 0
720 gch::small_vector<Variable<Scalar>> m_equality_constraints;
721
722 // The list of inequality constraints: cᵢ(x) ≥ 0
723 gch::small_vector<Variable<Scalar>> m_inequality_constraints;
724
725 // The iteration callbacks
726 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
727 m_iteration_callbacks;
728 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
729 m_persistent_iteration_callbacks;
730
731 void print_exit_conditions([[maybe_unused]] const Options& options) {
732 // Print possible exit conditions
733 slp::println("User-configured exit conditions:");
734 slp::println(" ↳ error below {}", options.tolerance);
735 if (!m_iteration_callbacks.empty() ||
736 !m_persistent_iteration_callbacks.empty()) {
737 slp::println(" ↳ iteration callback requested stop");
738 }
739 if (std::isfinite(options.max_iterations)) {
740 slp::println(" ↳ executed {} iterations", options.max_iterations);
741 }
742 if (std::isfinite(options.timeout.count())) {
743 slp::println(" ↳ {} elapsed", options.timeout);
744 }
745 }
746
747 void print_problem_analysis() {
748 constexpr std::array types{"no", "constant", "linear", "quadratic",
749 "nonlinear"};
750
751 // Print problem structure
752 slp::println("\nProblem structure:");
753 slp::println(" ↳ {} cost function",
754 types[std::to_underlying(cost_function_type())]);
755 slp::println(" ↳ {} equality constraints",
756 types[std::to_underlying(equality_constraint_type())]);
757 slp::println(" ↳ {} inequality constraints",
758 types[std::to_underlying(inequality_constraint_type())]);
759
760 if (m_decision_variables.size() == 1) {
761 slp::print("\n1 decision variable\n");
762 } else {
763 slp::print("\n{} decision variables\n", m_decision_variables.size());
764 }
765
766 auto print_constraint_types =
767 [](const gch::small_vector<Variable<Scalar>>& constraints) {
768 std::array<size_t, 5> counts{};
769 for (const auto& constraint : constraints) {
770 ++counts[std::to_underlying(constraint.type())];
771 }
772 for (const auto& [count, name] :
773 std::views::zip(counts, std::array{"empty", "constant", "linear",
774 "quadratic", "nonlinear"})) {
775 if (count > 0) {
776 slp::println(" ↳ {} {}", count, name);
777 }
778 }
779 };
780
781 // Print constraint types
782 if (m_equality_constraints.size() == 1) {
783 slp::println("1 equality constraint");
784 } else {
785 slp::println("{} equality constraints", m_equality_constraints.size());
786 }
787 print_constraint_types(m_equality_constraints);
788 if (m_inequality_constraints.size() == 1) {
789 slp::println("1 inequality constraint");
790 } else {
791 slp::println("{} inequality constraints",
792 m_inequality_constraints.size());
793 }
794 print_constraint_types(m_inequality_constraints);
795 }
796};
797
798extern template class EXPORT_TEMPLATE_DECLARE(SLEIPNIR_DLLEXPORT)
799Problem<double>;
800
801} // namespace slp
Definition intrusive_shared_ptr.hpp:27
Definition problem.hpp:66
VariableMatrix< Scalar > symmetric_decision_variable(int rows)
Definition problem.hpp:117
void subject_to(InequalityConstraints< Scalar > &&constraint)
Definition problem.hpp:228
void add_callback(F &&callback)
Definition problem.hpp:688
void subject_to(EqualityConstraints< Scalar > &&constraint)
Definition problem.hpp:206
ExpressionType equality_constraint_type() const
Definition problem.hpp:249
void maximize(Variable< Scalar > &&objective)
Definition problem.hpp:186
void add_persistent_callback(F &&callback)
Definition problem.hpp:707
void subject_to(const InequalityConstraints< Scalar > &constraint)
Definition problem.hpp:217
ExpressionType inequality_constraint_type() const
Definition problem.hpp:262
void minimize(const Variable< Scalar > &cost)
Definition problem.hpp:150
Problem() noexcept=default
Constructs the optimization problem.
VariableMatrix< Scalar > decision_variable(int rows, int cols=1)
Definition problem.hpp:90
void minimize(Variable< Scalar > &&cost)
Definition problem.hpp:161
void clear_callbacks()
Clears the registered callbacks.
Definition problem.hpp:693
void subject_to(const EqualityConstraints< Scalar > &constraint)
Definition problem.hpp:195
void maximize(const Variable< Scalar > &objective)
Definition problem.hpp:172
void add_callback(F &&callback)
Definition problem.hpp:669
ExpressionType cost_function_type() const
Definition problem.hpp:238
ExitStatus solve(const Options &options=Options{}, bool spy=false)
Definition problem.hpp:280
Variable< Scalar > decision_variable()
Definition problem.hpp:77
Definition variable.hpp:47
Solver options.
Definition options.hpp:13