Sleipnir C++ API
problem.hpp
1// Copyright (c) Sleipnir contributors
2
3#pragma once
4
5#include <algorithm>
6#include <array>
7#include <cmath>
8#include <concepts>
9#include <functional>
10#include <iterator>
11#include <memory>
12#include <optional>
13#include <ranges>
14#include <utility>
15
16#include <Eigen/Core>
17#include <Eigen/SparseCore>
18#include <gch/small_vector.hpp>
19
20#include "sleipnir/autodiff/expression_type.hpp"
21#include "sleipnir/autodiff/gradient.hpp"
22#include "sleipnir/autodiff/hessian.hpp"
23#include "sleipnir/autodiff/jacobian.hpp"
24#include "sleipnir/autodiff/variable.hpp"
25#include "sleipnir/autodiff/variable_matrix.hpp"
26#include "sleipnir/optimization/solver/exit_status.hpp"
27#include "sleipnir/optimization/solver/interior_point.hpp"
28#include "sleipnir/optimization/solver/iteration_info.hpp"
29#include "sleipnir/optimization/solver/newton.hpp"
30#include "sleipnir/optimization/solver/options.hpp"
31#include "sleipnir/optimization/solver/sqp.hpp"
32#include "sleipnir/optimization/solver/util/bounds.hpp"
33#include "sleipnir/util/empty.hpp"
34#include "sleipnir/util/print.hpp"
35#include "sleipnir/util/print_diagnostics.hpp"
36#include "sleipnir/util/profiler.hpp"
37#include "sleipnir/util/spy.hpp"
38#include "sleipnir/util/symbol_exports.hpp"
39
40namespace slp {
41
65template <typename Scalar>
66class Problem {
67 public:
70
74 [[nodiscard]]
75 Variable<Scalar> decision_variable() {
76 m_decision_variables.emplace_back();
77 return m_decision_variables.back();
78 }
79
85 [[nodiscard]]
86 VariableMatrix<Scalar> decision_variable(int rows, int cols = 1) {
87 m_decision_variables.reserve(m_decision_variables.size() + rows * cols);
88
89 VariableMatrix<Scalar> vars{detail::empty, rows, cols};
90
91 for (int row = 0; row < rows; ++row) {
92 for (int col = 0; col < cols; ++col) {
93 m_decision_variables.emplace_back();
94 vars[row, col] = m_decision_variables.back();
95 }
96 }
97
98 return vars;
99 }
100
110 [[nodiscard]]
111 VariableMatrix<Scalar> symmetric_decision_variable(int rows) {
112 // We only need to store the lower triangle of an n x n symmetric matrix;
113 // the other elements are duplicates. The lower triangle has (n² + n)/2
114 // elements.
115 //
116 // n
117 // Σ k = (n² + n)/2
118 // k=1
119 m_decision_variables.reserve(m_decision_variables.size() +
120 (rows * rows + rows) / 2);
121
122 VariableMatrix<Scalar> vars{detail::empty, rows, rows};
123
124 for (int row = 0; row < rows; ++row) {
125 for (int col = 0; col <= row; ++col) {
126 m_decision_variables.emplace_back();
127 vars[row, col] = m_decision_variables.back();
128 vars[col, row] = m_decision_variables.back();
129 }
130 }
131
132 return vars;
133 }
134
144 void minimize(const Variable<Scalar>& cost) { m_f = cost; }
145
155 void minimize(Variable<Scalar>&& cost) { m_f = std::move(cost); }
156
166 void maximize(const Variable<Scalar>& objective) {
167 // Maximizing a cost function is the same as minimizing its negative
168 m_f = -objective;
169 }
170
180 void maximize(Variable<Scalar>&& objective) {
181 // Maximizing a cost function is the same as minimizing its negative
182 m_f = -std::move(objective);
183 }
184
189 void subject_to(const EqualityConstraints<Scalar>& constraint) {
190 m_equality_constraints.reserve(m_equality_constraints.size() +
191 constraint.constraints.size());
192 std::ranges::copy(constraint.constraints,
193 std::back_inserter(m_equality_constraints));
194 }
195
200 void subject_to(EqualityConstraints<Scalar>&& constraint) {
201 m_equality_constraints.reserve(m_equality_constraints.size() +
202 constraint.constraints.size());
203 std::ranges::copy(constraint.constraints,
204 std::back_inserter(m_equality_constraints));
205 }
206
211 void subject_to(const InequalityConstraints<Scalar>& constraint) {
212 m_inequality_constraints.reserve(m_inequality_constraints.size() +
213 constraint.constraints.size());
214 std::ranges::copy(constraint.constraints,
215 std::back_inserter(m_inequality_constraints));
216 }
217
222 void subject_to(InequalityConstraints<Scalar>&& constraint) {
223 m_inequality_constraints.reserve(m_inequality_constraints.size() +
224 constraint.constraints.size());
225 std::ranges::copy(constraint.constraints,
226 std::back_inserter(m_inequality_constraints));
227 }
228
232 ExpressionType cost_function_type() const {
233 if (m_f) {
234 return m_f.value().type();
235 } else {
236 return ExpressionType::NONE;
237 }
238 }
239
243 ExpressionType equality_constraint_type() const {
244 if (!m_equality_constraints.empty()) {
245 return std::ranges::max(m_equality_constraints, {},
246 [](const auto& constraint) { return constraint.type(); })
247 .type();
248 } else {
249 return ExpressionType::NONE;
250 }
251 }
252
256 ExpressionType inequality_constraint_type() const {
257 if (!m_inequality_constraints.empty()) {
258 return std::ranges::max(m_inequality_constraints, {},
259 [](const auto& constraint) { return constraint.type(); })
260 .type();
261 } else {
262 return ExpressionType::NONE;
263 }
264 }
265
274 ExitStatus solve(const Options& options = Options{},
275 [[maybe_unused]] bool spy = false) {
276 using DenseVector = Eigen::Vector<Scalar, Eigen::Dynamic>;
277 using SparseMatrix = Eigen::SparseMatrix<Scalar>;
278 using SparseVector = Eigen::SparseVector<Scalar>;
279
280 // Create the initial value column vector
281 DenseVector x{m_decision_variables.size()};
282 for (size_t i = 0; i < m_decision_variables.size(); ++i) {
283 x[i] = m_decision_variables[i].value();
284 }
285
286 if (options.diagnostics) {
287 print_exit_conditions(options);
288 print_problem_analysis();
289 }
290
291 // Get the highest order constraint expression types
292 auto f_type = cost_function_type();
293 auto c_e_type = equality_constraint_type();
294 auto c_i_type = inequality_constraint_type();
295
296 // If the problem is empty or constant, there's nothing to do
297 if (f_type <= ExpressionType::CONSTANT &&
298 c_e_type <= ExpressionType::CONSTANT &&
299 c_i_type <= ExpressionType::CONSTANT) {
300#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
301 if (options.diagnostics) {
302 slp::println("\nInvoking no-op solver...\n");
303 }
304#endif
305 return ExitStatus::SUCCESS;
306 }
307
308 gch::small_vector<SetupProfiler> ad_setup_profilers;
309 ad_setup_profilers.emplace_back("setup").start();
310
311 VariableMatrix<Scalar> x_ad{m_decision_variables};
312
313 // Set up cost function
314 Variable f = m_f.value_or(Scalar(0));
315
316 // Set up gradient autodiff
317 ad_setup_profilers.emplace_back(" ↳ ∇f(x)").start();
318 Gradient g{f, x_ad};
319 ad_setup_profilers.back().stop();
320
321 [[maybe_unused]]
322 int num_decision_variables = m_decision_variables.size();
323 [[maybe_unused]]
324 int num_equality_constraints = m_equality_constraints.size();
325 [[maybe_unused]]
326 int num_inequality_constraints = m_inequality_constraints.size();
327
328 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
329 callbacks;
330 for (const auto& callback : m_iteration_callbacks) {
331 callbacks.emplace_back(callback);
332 }
333 for (const auto& callback : m_persistent_iteration_callbacks) {
334 callbacks.emplace_back(callback);
335 }
336
337 // Solve the optimization problem
338 ExitStatus status;
339 if (m_equality_constraints.empty() && m_inequality_constraints.empty()) {
340 if (options.diagnostics) {
341 slp::println("\nInvoking Newton solver...\n");
342 }
343
344 // Set up Lagrangian Hessian autodiff
345 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
346 Hessian<Scalar, Eigen::Lower> H{f, x_ad};
347 ad_setup_profilers.back().stop();
348
349 ad_setup_profilers[0].stop();
350
351#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
352 // Sparsity pattern files written when spy flag is set
353 std::unique_ptr<Spy<Scalar>> H_spy;
354
355 if (spy) {
356 H_spy = std::make_unique<Spy<Scalar>>(
357 "H.spy", "Hessian", "Decision variables", "Decision variables",
358 num_decision_variables, num_decision_variables);
359 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
360 H_spy->add(info.H);
361 return false;
362 });
363 }
364#endif
365
366 // Invoke Newton solver
367 status = newton<Scalar>(NewtonMatrixCallbacks<Scalar>{
368 [&](const DenseVector& x) -> Scalar {
369 x_ad.set_value(x);
370 return f.value();
371 },
372 [&](const DenseVector& x) -> SparseVector {
373 x_ad.set_value(x);
374 return g.value();
375 },
376 [&](const DenseVector& x) -> SparseMatrix {
377 x_ad.set_value(x);
378 return H.value();
379 }},
380 callbacks, options, x);
381 } else if (m_inequality_constraints.empty()) {
382 if (options.diagnostics) {
383 slp::println("\nInvoking SQP solver\n");
384 }
385
386 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
387
388 // Set up equality constraint Jacobian autodiff
389 ad_setup_profilers.emplace_back(" ↳ ∂cₑ/∂x").start();
390 Jacobian A_e{c_e_ad, x_ad};
391 ad_setup_profilers.back().stop();
392
393 // Set up Lagrangian
394 VariableMatrix<Scalar> y_ad(num_equality_constraints);
395 Variable L = f - y_ad.T() * c_e_ad;
396
397 // Set up Lagrangian Hessian autodiff
398 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
399 Hessian<Scalar, Eigen::Lower> H{L, x_ad};
400 ad_setup_profilers.back().stop();
401
402 ad_setup_profilers[0].stop();
403
404#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
405 // Sparsity pattern files written when spy flag is set
406 std::unique_ptr<Spy<Scalar>> H_spy;
407 std::unique_ptr<Spy<Scalar>> A_e_spy;
408
409 if (spy) {
410 H_spy = std::make_unique<Spy<Scalar>>(
411 "H.spy", "Hessian", "Decision variables", "Decision variables",
412 num_decision_variables, num_decision_variables);
413 A_e_spy = std::make_unique<Spy<Scalar>>(
414 "A_e.spy", "Equality constraint Jacobian", "Constraints",
415 "Decision variables", num_equality_constraints,
416 num_decision_variables);
417 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
418 H_spy->add(info.H);
419 A_e_spy->add(info.A_e);
420 return false;
421 });
422 }
423#endif
424
425 // Invoke SQP solver
426 status = sqp<Scalar>(
427 SQPMatrixCallbacks<Scalar>{
428 [&](const DenseVector& x) -> Scalar {
429 x_ad.set_value(x);
430 return f.value();
431 },
432 [&](const DenseVector& x) -> SparseVector {
433 x_ad.set_value(x);
434 return g.value();
435 },
436 [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
437 x_ad.set_value(x);
438 y_ad.set_value(y);
439 return H.value();
440 },
441 [&](const DenseVector& x) -> DenseVector {
442 x_ad.set_value(x);
443 return c_e_ad.value();
444 },
445 [&](const DenseVector& x) -> SparseMatrix {
446 x_ad.set_value(x);
447 return A_e.value();
448 }},
449 callbacks, options, x);
450 } else {
451 if (options.diagnostics) {
452 slp::println("\nInvoking IPM solver...\n");
453 }
454
455 VariableMatrix<Scalar> c_e_ad{m_equality_constraints};
456 VariableMatrix<Scalar> c_i_ad{m_inequality_constraints};
457
458 // Set up equality constraint Jacobian autodiff
459 ad_setup_profilers.emplace_back(" ↳ ∂cₑ/∂x").start();
460 Jacobian A_e{c_e_ad, x_ad};
461 ad_setup_profilers.back().stop();
462
463 // Set up inequality constraint Jacobian autodiff
464 ad_setup_profilers.emplace_back(" ↳ ∂cᵢ/∂x").start();
465 Jacobian A_i{c_i_ad, x_ad};
466 ad_setup_profilers.back().stop();
467
468 // Set up Lagrangian
469 VariableMatrix<Scalar> y_ad(num_equality_constraints);
470 VariableMatrix<Scalar> z_ad(num_inequality_constraints);
471 Variable L = f - y_ad.T() * c_e_ad - z_ad.T() * c_i_ad;
472
473 // Set up Lagrangian Hessian autodiff
474 ad_setup_profilers.emplace_back(" ↳ ∇²ₓₓL").start();
475 Hessian<Scalar, Eigen::Lower> H{L, x_ad};
476 ad_setup_profilers.back().stop();
477
478 ad_setup_profilers[0].stop();
479
480#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
481 // Sparsity pattern files written when spy flag is set
482 std::unique_ptr<Spy<Scalar>> H_spy;
483 std::unique_ptr<Spy<Scalar>> A_e_spy;
484 std::unique_ptr<Spy<Scalar>> A_i_spy;
485
486 if (spy) {
487 H_spy = std::make_unique<Spy<Scalar>>(
488 "H.spy", "Hessian", "Decision variables", "Decision variables",
489 num_decision_variables, num_decision_variables);
490 A_e_spy = std::make_unique<Spy<Scalar>>(
491 "A_e.spy", "Equality constraint Jacobian", "Constraints",
492 "Decision variables", num_equality_constraints,
493 num_decision_variables);
494 A_i_spy = std::make_unique<Spy<Scalar>>(
495 "A_i.spy", "Inequality constraint Jacobian", "Constraints",
496 "Decision variables", num_inequality_constraints,
497 num_decision_variables);
498 callbacks.push_back([&](const IterationInfo<Scalar>& info) -> bool {
499 H_spy->add(info.H);
500 A_e_spy->add(info.A_e);
501 A_i_spy->add(info.A_i);
502 return false;
503 });
504 }
505#endif
506
507 const auto [bound_constraint_mask, bounds, conflicting_bound_indices] =
508 get_bounds<Scalar>(m_decision_variables, m_inequality_constraints,
509 A_i.value());
510 if (!conflicting_bound_indices.empty()) {
511 if (options.diagnostics) {
512 print_bound_constraint_global_infeasibility_error(
513 conflicting_bound_indices);
514 }
515 return ExitStatus::GLOBALLY_INFEASIBLE;
516 }
517
518#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
519 project_onto_bounds(x, bounds);
520#endif
521 // Invoke interior-point method solver
522 status = interior_point<Scalar>(
523 InteriorPointMatrixCallbacks<Scalar>{
524 [&](const DenseVector& x) -> Scalar {
525 x_ad.set_value(x);
526 return f.value();
527 },
528 [&](const DenseVector& x) -> SparseVector {
529 x_ad.set_value(x);
530 return g.value();
531 },
532 [&](const DenseVector& x, const DenseVector& y,
533 const DenseVector& z) -> SparseMatrix {
534 x_ad.set_value(x);
535 y_ad.set_value(y);
536 z_ad.set_value(z);
537 return H.value();
538 },
539 [&](const DenseVector& x) -> DenseVector {
540 x_ad.set_value(x);
541 return c_e_ad.value();
542 },
543 [&](const DenseVector& x) -> SparseMatrix {
544 x_ad.set_value(x);
545 return A_e.value();
546 },
547 [&](const DenseVector& x) -> DenseVector {
548 x_ad.set_value(x);
549 return c_i_ad.value();
550 },
551 [&](const DenseVector& x) -> SparseMatrix {
552 x_ad.set_value(x);
553 return A_i.value();
554 }},
555 callbacks, options,
556#ifdef SLEIPNIR_ENABLE_BOUND_PROJECTION
557 bound_constraint_mask,
558#endif
559 x);
560 }
561
562 if (options.diagnostics) {
563 print_autodiff_diagnostics(ad_setup_profilers);
564 slp::println("\nExit: {}", status);
565 }
566
567 // Assign the solution to the original Variable instances
568 VariableMatrix<Scalar>{m_decision_variables}.set_value(x);
569
570 return status;
571 }
572
578 template <typename F>
579 requires requires(F callback, const IterationInfo<Scalar>& info) {
580 { callback(info) } -> std::same_as<void>;
581 }
582 void add_callback(F&& callback) {
583 m_iteration_callbacks.emplace_back(
584 [=, callback =
585 std::forward<F>(callback)](const IterationInfo<Scalar>& info) {
586 callback(info);
587 return false;
588 });
589 }
590
597 template <typename F>
598 requires requires(F callback, const IterationInfo<Scalar>& info) {
599 { callback(info) } -> std::same_as<bool>;
600 }
601 void add_callback(F&& callback) {
602 m_iteration_callbacks.emplace_back(std::forward<F>(callback));
603 }
604
606 void clear_callbacks() { m_iteration_callbacks.clear(); }
607
616 template <typename F>
617 requires requires(F callback, const IterationInfo<Scalar>& info) {
618 { callback(info) } -> std::same_as<bool>;
619 }
620 void add_persistent_callback(F&& callback) {
621 m_persistent_iteration_callbacks.emplace_back(std::forward<F>(callback));
622 }
623
624 private:
625 // The list of decision variables, which are the root of the problem's
626 // expression tree
627 gch::small_vector<Variable<Scalar>> m_decision_variables;
628
629 // The cost function: f(x)
630 std::optional<Variable<Scalar>> m_f;
631
632 // The list of equality constraints: cₑ(x) = 0
633 gch::small_vector<Variable<Scalar>> m_equality_constraints;
634
635 // The list of inequality constraints: cᵢ(x) ≥ 0
636 gch::small_vector<Variable<Scalar>> m_inequality_constraints;
637
638 // The iteration callbacks
639 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
640 m_iteration_callbacks;
641 gch::small_vector<std::function<bool(const IterationInfo<Scalar>& info)>>
642 m_persistent_iteration_callbacks;
643
644 void print_exit_conditions([[maybe_unused]] const Options& options) {
645 // Print possible exit conditions
646 slp::println("User-configured exit conditions:");
647 slp::println(" ↳ error below {}", options.tolerance);
648 if (!m_iteration_callbacks.empty() ||
649 !m_persistent_iteration_callbacks.empty()) {
650 slp::println(" ↳ iteration callback requested stop");
651 }
652 if (std::isfinite(options.max_iterations)) {
653 slp::println(" ↳ executed {} iterations", options.max_iterations);
654 }
655 if (std::isfinite(options.timeout.count())) {
656 slp::println(" ↳ {} elapsed", options.timeout);
657 }
658 }
659
660 void print_problem_analysis() {
661 constexpr std::array types{"no", "constant", "linear", "quadratic",
662 "nonlinear"};
663
664 // Print problem structure
665 slp::println("\nProblem structure:");
666 slp::println(" ↳ {} cost function",
667 types[std::to_underlying(cost_function_type())]);
668 slp::println(" ↳ {} equality constraints",
669 types[std::to_underlying(equality_constraint_type())]);
670 slp::println(" ↳ {} inequality constraints",
671 types[std::to_underlying(inequality_constraint_type())]);
672
673 if (m_decision_variables.size() == 1) {
674 slp::print("\n1 decision variable\n");
675 } else {
676 slp::print("\n{} decision variables\n", m_decision_variables.size());
677 }
678
679 auto print_constraint_types =
680 [](const gch::small_vector<Variable<Scalar>>& constraints) {
681 std::array<size_t, 5> counts{};
682 for (const auto& constraint : constraints) {
683 ++counts[std::to_underlying(constraint.type())];
684 }
685 for (const auto& [count, name] :
686 std::views::zip(counts, std::array{"empty", "constant", "linear",
687 "quadratic", "nonlinear"})) {
688 if (count > 0) {
689 slp::println(" ↳ {} {}", count, name);
690 }
691 }
692 };
693
694 // Print constraint types
695 if (m_equality_constraints.size() == 1) {
696 slp::println("1 equality constraint");
697 } else {
698 slp::println("{} equality constraints", m_equality_constraints.size());
699 }
700 print_constraint_types(m_equality_constraints);
701 if (m_inequality_constraints.size() == 1) {
702 slp::println("1 inequality constraint");
703 } else {
704 slp::println("{} inequality constraints",
705 m_inequality_constraints.size());
706 }
707 print_constraint_types(m_inequality_constraints);
708 }
709};
710
711extern template class EXPORT_TEMPLATE_DECLARE(SLEIPNIR_DLLEXPORT)
712Problem<double>;
713
714} // namespace slp
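
A minimal usage sketch of the Problem API declared above (not part of problem.hpp). It assumes the header installs as sleipnir/optimization/problem.hpp, that Variable comparison operators produce the EqualityConstraints/InequalityConstraints accepted by subject_to, and that Variable exposes set_value()/value(); the variables x and y are illustrative.

#include <cstdio>

#include <sleipnir/optimization/problem.hpp>

int main() {
  slp::Problem<double> problem;

  // Create decision variables and seed them with an initial guess.
  auto x = problem.decision_variable();
  auto y = problem.decision_variable();
  x.set_value(1.0);
  y.set_value(2.0);

  // Minimize a quadratic cost subject to a linear inequality constraint.
  problem.minimize((x - 2.0) * (x - 2.0) + y * y);
  problem.subject_to(x + y >= 1.0);

  // Observe solver progress; returning true would stop the solve early.
  problem.add_callback(
      [](const slp::IterationInfo<double>&) { return false; });

  if (problem.solve() == slp::ExitStatus::SUCCESS) {
    // solve() writes the solution back into the Variable handles.
    std::printf("x = %f, y = %f\n", x.value(), y.value());
  }
}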