Sleipnir C++ API
sqp.hpp
// Copyright (c) Sleipnir contributors

#pragma once

#include <chrono>
#include <cmath>
#include <functional>
#include <limits>
#include <span>

#include <Eigen/Core>
#include <Eigen/SparseCore>
#include <gch/small_vector.hpp>

#include "sleipnir/optimization/solver/exit_status.hpp"
#include "sleipnir/optimization/solver/iteration_info.hpp"
#include "sleipnir/optimization/solver/options.hpp"
#include "sleipnir/optimization/solver/sqp_matrix_callbacks.hpp"
#include "sleipnir/optimization/solver/util/error_estimate.hpp"
#include "sleipnir/optimization/solver/util/filter.hpp"
#include "sleipnir/optimization/solver/util/is_locally_infeasible.hpp"
#include "sleipnir/optimization/solver/util/kkt_error.hpp"
#include "sleipnir/optimization/solver/util/regularized_ldlt.hpp"
#include "sleipnir/util/assert.hpp"
#include "sleipnir/util/print_diagnostics.hpp"
#include "sleipnir/util/profiler.hpp"
#include "sleipnir/util/scope_exit.hpp"
#include "sleipnir/util/symbol_exports.hpp"

// See docs/algorithms.md#Works_cited for citation definitions.

namespace slp {

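/**
 * Finds the optimal solution to a nonlinear program with equality constraints
 * using Sequential Quadratic Programming (SQP).
 *
 * @tparam Scalar The scalar type of the decision variables.
 * @param matrix_callbacks Callbacks that evaluate the cost f(x), its gradient
 *     ∇f(x), the Lagrangian Hessian ∇²ₓₓL, the equality constraints cₑ(x), and
 *     the constraint Jacobian ∂cₑ/∂x.
 * @param iteration_callbacks Callbacks invoked at each iteration; returning
 *     true requests an early stop.
 * @param options Solver options.
 * @param x The initial guess; overwritten with the solution when the solver
 *     exits.
 * @return The solver's exit status.
 */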
template <typename Scalar>
ExitStatus sqp(const SQPMatrixCallbacks<Scalar>& matrix_callbacks,
               std::span<std::function<bool(const IterationInfo<Scalar>& info)>>
                   iteration_callbacks,
               const Options& options,
               Eigen::Vector<Scalar, Eigen::Dynamic>& x) {
  using DenseVector = Eigen::Vector<Scalar, Eigen::Dynamic>;
  using SparseMatrix = Eigen::SparseMatrix<Scalar>;
  using SparseVector = Eigen::SparseVector<Scalar>;

  // Primal-dual step returned by the Newton-KKT solve
  struct Step {
    // Primal step pˣ
    DenseVector p_x;
    // Dual step pʸ
    DenseVector p_y;
  };

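  // Lets unqualified isfinite() calls resolve via ADL for user-defined scalar
  // types while falling back to std::isfinite for built-in types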
  using std::isfinite;

  const auto solve_start_time = std::chrono::steady_clock::now();

  gch::small_vector<SolveProfiler> solve_profilers;
  solve_profilers.emplace_back("solver");
  solve_profilers.emplace_back(" ↳ setup");
  solve_profilers.emplace_back(" ↳ iteration");
  solve_profilers.emplace_back(" ↳ feasibility ✓");
  solve_profilers.emplace_back(" ↳ iter callbacks");
  solve_profilers.emplace_back(" ↳ KKT matrix build");
  solve_profilers.emplace_back(" ↳ KKT matrix decomp");
  solve_profilers.emplace_back(" ↳ KKT system solve");
  solve_profilers.emplace_back(" ↳ line search");
  solve_profilers.emplace_back(" ↳ SOC");
  solve_profilers.emplace_back(" ↳ next iter prep");
  solve_profilers.emplace_back(" ↳ f(x)");
  solve_profilers.emplace_back(" ↳ ∇f(x)");
  solve_profilers.emplace_back(" ↳ ∇²ₓₓL");
  solve_profilers.emplace_back(" ↳ cₑ(x)");
  solve_profilers.emplace_back(" ↳ ∂cₑ/∂x");

  auto& solver_prof = solve_profilers[0];
  auto& setup_prof = solve_profilers[1];
  auto& inner_iter_prof = solve_profilers[2];
  auto& feasibility_check_prof = solve_profilers[3];
  auto& iter_callbacks_prof = solve_profilers[4];
  auto& kkt_matrix_build_prof = solve_profilers[5];
  auto& kkt_matrix_decomp_prof = solve_profilers[6];
  auto& kkt_system_solve_prof = solve_profilers[7];
  auto& line_search_prof = solve_profilers[8];
  auto& soc_prof = solve_profilers[9];
  auto& next_iter_prep_prof = solve_profilers[10];

  // Set up profiled matrix callbacks
#ifndef SLEIPNIR_DISABLE_DIAGNOSTICS
  auto& f_prof = solve_profilers[11];
  auto& g_prof = solve_profilers[12];
  auto& H_prof = solve_profilers[13];
  auto& c_e_prof = solve_profilers[14];
  auto& A_e_prof = solve_profilers[15];

  SQPMatrixCallbacks<Scalar> matrices{
      [&](const DenseVector& x) -> Scalar {
        ScopedProfiler prof{f_prof};
        return matrix_callbacks.f(x);
      },
      [&](const DenseVector& x) -> SparseVector {
        ScopedProfiler prof{g_prof};
        return matrix_callbacks.g(x);
      },
      [&](const DenseVector& x, const DenseVector& y) -> SparseMatrix {
        ScopedProfiler prof{H_prof};
        return matrix_callbacks.H(x, y);
      },
      [&](const DenseVector& x) -> DenseVector {
        ScopedProfiler prof{c_e_prof};
        return matrix_callbacks.c_e(x);
      },
      [&](const DenseVector& x) -> SparseMatrix {
        ScopedProfiler prof{A_e_prof};
        return matrix_callbacks.A_e(x);
      }};
#else
  const auto& matrices = matrix_callbacks;
#endif

  solver_prof.start();
  setup_prof.start();

  Scalar f = matrices.f(x);
  DenseVector c_e = matrices.c_e(x);

  int num_decision_variables = x.rows();
  int num_equality_constraints = c_e.rows();

  // Check for overconstrained problem
  if (num_equality_constraints > num_decision_variables) {
    if (options.diagnostics) {
      print_too_few_dofs_error(c_e);
    }

    return ExitStatus::TOO_FEW_DOFS;
  }

  SparseVector g = matrices.g(x);
  SparseMatrix A_e = matrices.A_e(x);

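  // Equality constraint duals (Lagrange multiplier estimates), initialized to
  // zero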
  DenseVector y = DenseVector::Zero(num_equality_constraints);

  SparseMatrix H = matrices.H(x, y);

  // Ensure matrix callback dimensions are consistent
  slp_assert(g.rows() == num_decision_variables);
  slp_assert(A_e.rows() == num_equality_constraints);
  slp_assert(A_e.cols() == num_decision_variables);
  slp_assert(H.rows() == num_decision_variables);
  slp_assert(H.cols() == num_decision_variables);

  // Check whether initial guess has finite f(xₖ) and cₑ(xₖ)
  if (!isfinite(f) || !c_e.allFinite()) {
    return ExitStatus::NONFINITE_INITIAL_COST_OR_CONSTRAINTS;
  }

  int iterations = 0;

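  // Step filter used by the line search; a trial iterate is accepted only if
  // it improves the cost or the constraint violation relative to previous
  // iterates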
  Filter<Scalar> filter;

  // Kept outside the loop so its storage can be reused
  gch::small_vector<Eigen::Triplet<Scalar>> triplets;

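  // Regularized LDLᵀ factorization used to solve the Newton-KKT system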
  RegularizedLDLT<Scalar> solver{num_decision_variables,
                                 num_equality_constraints};

  // Variables for determining when a step is acceptable
  constexpr Scalar α_reduction_factor(0.5);
  constexpr Scalar α_min(1e-7);

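  // Number of consecutive iterations in which the filter rejected the full
  // step; used to decide when to reset the filter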
  int full_step_rejected_counter = 0;

  // Error estimate
  Scalar E_0 = std::numeric_limits<Scalar>::infinity();

  setup_prof.stop();

  // Prints final solver diagnostics when the solver exits
  scope_exit exit{[&] {
    if (options.diagnostics) {
      solver_prof.stop();
      if (iterations > 0) {
        print_bottom_iteration_diagnostics();
      }
      print_solver_diagnostics(solve_profilers);
    }
  }};

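  // Iterate until the error estimate falls below the convergence tolerance or
  // an exit condition is hit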
  while (E_0 > Scalar(options.tolerance)) {
    ScopedProfiler inner_iter_profiler{inner_iter_prof};
    ScopedProfiler feasibility_check_profiler{feasibility_check_prof};

    // Check for local equality constraint infeasibility
    if (is_equality_locally_infeasible(A_e, c_e)) {
      if (options.diagnostics) {
        print_c_e_local_infeasibility_error(c_e);
      }

      return ExitStatus::LOCALLY_INFEASIBLE;
    }

    // Check for diverging iterates
    if (x.template lpNorm<Eigen::Infinity>() > Scalar(1e10) || !x.allFinite()) {
      return ExitStatus::DIVERGING_ITERATES;
    }

    feasibility_check_profiler.stop();
    ScopedProfiler iter_callbacks_profiler{iter_callbacks_prof};

    // Call iteration callbacks
    for (const auto& callback : iteration_callbacks) {
      if (callback({iterations, x, g, H, A_e, SparseMatrix{}})) {
        return ExitStatus::CALLBACK_REQUESTED_STOP;
      }
    }

    iter_callbacks_profiler.stop();
    ScopedProfiler kkt_matrix_build_profiler{kkt_matrix_build_prof};

    // lhs = [H   Aₑᵀ]
    //       [Aₑ  0  ]
    //
    // Don't assign upper triangle because solver only uses lower triangle.
    triplets.clear();
    triplets.reserve(H.nonZeros() + A_e.nonZeros());
    for (int col = 0; col < H.cols(); ++col) {
      // Append column of H lower triangle in top-left quadrant
      for (typename SparseMatrix::InnerIterator it{H, col}; it; ++it) {
        triplets.emplace_back(it.row(), it.col(), it.value());
      }
      // Append column of Aₑ in bottom-left quadrant
      for (typename SparseMatrix::InnerIterator it{A_e, col}; it; ++it) {
        triplets.emplace_back(H.rows() + it.row(), it.col(), it.value());
      }
    }
    SparseMatrix lhs(num_decision_variables + num_equality_constraints,
                     num_decision_variables + num_equality_constraints);
    lhs.setFromSortedTriplets(triplets.begin(), triplets.end());

    // rhs = −[∇f − Aₑᵀy]
    //        [    cₑ   ]
    DenseVector rhs{x.rows() + y.rows()};
    rhs.segment(0, x.rows()) = -g + A_e.transpose() * y;
    rhs.segment(x.rows(), y.rows()) = -c_e;

    kkt_matrix_build_profiler.stop();
    ScopedProfiler kkt_matrix_decomp_profiler{kkt_matrix_decomp_prof};

    Step step;
    constexpr Scalar α_max(1);
    Scalar α(1);

    // Solve the Newton-KKT system
    //
    // [H   Aₑᵀ][ pˣ] = −[∇f − Aₑᵀy]
    // [Aₑ  0  ][−pʸ]    [    cₑ   ]
    if (solver.compute(lhs).info() != Eigen::Success) [[unlikely]] {
      return ExitStatus::FACTORIZATION_FAILED;
    }

    kkt_matrix_decomp_profiler.stop();
    ScopedProfiler kkt_system_solve_profiler{kkt_system_solve_prof};

    auto compute_step = [&](Step& step) {
      // p = [ pˣ]
      //     [−pʸ]
      DenseVector p = solver.solve(rhs);
      step.p_x = p.segment(0, x.rows());
      step.p_y = -p.segment(x.rows(), y.rows());
    };
    compute_step(step);

    kkt_system_solve_profiler.stop();
    ScopedProfiler line_search_profiler{line_search_prof};

    α = α_max;

    // Loop until a step is accepted
    while (1) {
      DenseVector trial_x = x + α * step.p_x;
      DenseVector trial_y = y + α * step.p_y;

      Scalar trial_f = matrices.f(trial_x);
      DenseVector trial_c_e = matrices.c_e(trial_x);

      // If f(xₖ + αpₖˣ) or cₑ(xₖ + αpₖˣ) aren't finite, reduce step size
      // immediately
      if (!isfinite(trial_f) || !trial_c_e.allFinite()) {
        // Reduce step size
        α *= α_reduction_factor;

        if (α < α_min) {
          return ExitStatus::LINE_SEARCH_FAILED;
        }
        continue;
      }

      // Check whether filter accepts trial iterate
      if (filter.try_add(FilterEntry{trial_f, trial_c_e}, α)) {
        // Accept step
        break;
      }

      Scalar prev_constraint_violation = c_e.template lpNorm<1>();
      Scalar next_constraint_violation = trial_c_e.template lpNorm<1>();

      // Second-order corrections
      //
      // If first trial point was rejected and constraint violation stayed the
      // same or went up, apply second-order corrections
      if (α == α_max &&
          next_constraint_violation >= prev_constraint_violation) {
        // Apply second-order corrections. See section 2.4 of [2].
        auto soc_step = step;

        Scalar α_soc = α;
        DenseVector c_e_soc = c_e;

        bool step_acceptable = false;
        for (int soc_iteration = 0; soc_iteration < 5 && !step_acceptable;
             ++soc_iteration) {
          ScopedProfiler soc_profiler{soc_prof};

          scope_exit soc_exit{[&] {
            soc_profiler.stop();

            if (options.diagnostics) {
              print_iteration_diagnostics(
                  iterations,
                  step_acceptable ? IterationType::ACCEPTED_SOC
                                  : IterationType::REJECTED_SOC,
                  soc_profiler.current_duration(),
                  error_estimate<Scalar>(g, A_e, trial_c_e, trial_y), trial_f,
                  trial_c_e.template lpNorm<1>(), Scalar(0), Scalar(0),
                  solver.hessian_regularization(), α_soc, Scalar(1),
                  α_reduction_factor, Scalar(1));
            }
          }};

          // Rebuild Newton-KKT rhs with updated constraint values.
          //
          // rhs = −[∇f − Aₑᵀy]
          //        [  cₑˢᵒᶜ  ]
          //
          // where cₑˢᵒᶜ = αc(xₖ) + c(xₖ + αpₖˣ)
          c_e_soc = α_soc * c_e_soc + trial_c_e;
          rhs.bottomRows(y.rows()) = -c_e_soc;

          // Solve the Newton-KKT system
          compute_step(soc_step);

          trial_x = x + α_soc * soc_step.p_x;
          trial_y = y + α_soc * soc_step.p_y;

          trial_f = matrices.f(trial_x);
          trial_c_e = matrices.c_e(trial_x);

          // Constraint violation scale factor for second-order corrections
          constexpr Scalar κ_soc(0.99);

          // If constraint violation hasn't been sufficiently reduced, stop
          // making second-order corrections
          next_constraint_violation = trial_c_e.template lpNorm<1>();
          if (next_constraint_violation > κ_soc * prev_constraint_violation) {
            break;
          }

          // Check whether filter accepts trial iterate
          if (filter.try_add(FilterEntry{trial_f, trial_c_e}, α)) {
            step = soc_step;
            α = α_soc;
            step_acceptable = true;
          }
        }

        if (step_acceptable) {
          // Accept step
          break;
        }
      }

      // If we got here and α is the full step, the full step was rejected.
      // Increment the full-step rejected counter to keep track of how many full
      // steps have been rejected in a row.
      if (α == α_max) {
        ++full_step_rejected_counter;
      }

      // If the full step was rejected enough times in a row, reset the filter
      // because it may be impeding progress.
      //
      // See section 3.2 case I of [2].
      if (full_step_rejected_counter >= 4 &&
          filter.max_constraint_violation >
              filter.back().constraint_violation / Scalar(10)) {
        filter.max_constraint_violation *= Scalar(0.1);
        filter.reset();
        continue;
      }

      // Reduce step size
      α *= α_reduction_factor;

      // If step size hit a minimum, check if the KKT error was reduced. If it
      // wasn't, report line search failure.
      if (α < α_min) {
        Scalar current_kkt_error = kkt_error<Scalar>(g, A_e, c_e, y);

        trial_x = x + α_max * step.p_x;
        trial_y = y + α_max * step.p_y;

        trial_c_e = matrices.c_e(trial_x);

        Scalar next_kkt_error = kkt_error<Scalar>(
            matrices.g(trial_x), matrices.A_e(trial_x), trial_c_e, trial_y);

        // If the step using αᵐᵃˣ reduced the KKT error, accept it anyway
        if (next_kkt_error <= Scalar(0.999) * current_kkt_error) {
          α = α_max;

          // Accept step
          break;
        }

        return ExitStatus::LINE_SEARCH_FAILED;
      }
    }

    line_search_profiler.stop();

    // If full step was accepted, reset full-step rejected counter
    if (α == α_max) {
      full_step_rejected_counter = 0;
    }

    // xₖ₊₁ = xₖ + αₖpₖˣ
    // yₖ₊₁ = yₖ + αₖpₖʸ
    x += α * step.p_x;
    y += α * step.p_y;

    // Re-evaluate the cost, gradient, constraint Jacobian, and Hessian at the
    // new iterate
    f = matrices.f(x);
    A_e = matrices.A_e(x);
    g = matrices.g(x);
    H = matrices.H(x, y);

    ScopedProfiler next_iter_prep_profiler{next_iter_prep_prof};

    c_e = matrices.c_e(x);

    // Update the error estimate
    E_0 = error_estimate<Scalar>(g, A_e, c_e, y);

    next_iter_prep_profiler.stop();
    inner_iter_profiler.stop();

    if (options.diagnostics) {
      print_iteration_diagnostics(iterations, IterationType::NORMAL,
                                  inner_iter_profiler.current_duration(), E_0,
                                  f, c_e.template lpNorm<1>(), Scalar(0),
                                  Scalar(0), solver.hessian_regularization(), α,
                                  α_max, α_reduction_factor, α);
    }

    ++iterations;

    // Check for max iterations
    if (iterations >= options.max_iterations) {
      return ExitStatus::MAX_ITERATIONS_EXCEEDED;
    }

    // Check for max wall clock time
    if (std::chrono::steady_clock::now() - solve_start_time > options.timeout) {
      return ExitStatus::TIMEOUT;
    }
  }

  return ExitStatus::SUCCESS;
}

extern template SLEIPNIR_DLLEXPORT ExitStatus
sqp(const SQPMatrixCallbacks<double>& matrix_callbacks,
    std::span<std::function<bool(const IterationInfo<double>& info)>>
        iteration_callbacks,
    const Options& options, Eigen::Vector<double, Eigen::Dynamic>& x);

}  // namespace slp
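Usage sketch (not part of sqp.hpp): the example below wires up the five matrix callbacks for a small equality-constrained problem, minimize (x₀ − 1)² + (x₁ − 2)² subject to x₀ + x₁ = 2, and calls slp::sqp with no iteration callbacks. It assumes Options is default-constructible and that SQPMatrixCallbacks<double> can be brace-initialized from five callables in the order f, ∇f, ∇²ₓₓL, cₑ, ∂cₑ/∂x, mirroring the profiled-callback construction in the header above.

// Example program (assumptions noted in the text above)
#include <Eigen/Core>
#include <Eigen/SparseCore>

#include "sleipnir/optimization/solver/sqp.hpp"

int main() {
  using Vec = Eigen::Vector<double, Eigen::Dynamic>;
  using SpMat = Eigen::SparseMatrix<double>;
  using SpVec = Eigen::SparseVector<double>;

  slp::SQPMatrixCallbacks<double> callbacks{
      // f(x) = (x₀ − 1)² + (x₁ − 2)²
      [](const Vec& x) -> double {
        return (x[0] - 1.0) * (x[0] - 1.0) + (x[1] - 2.0) * (x[1] - 2.0);
      },
      // ∇f(x)
      [](const Vec& x) -> SpVec {
        SpVec g(2);
        g.insert(0) = 2.0 * (x[0] - 1.0);
        g.insert(1) = 2.0 * (x[1] - 2.0);
        return g;
      },
      // ∇²ₓₓL(x, y) (constant because f is quadratic and cₑ is linear)
      [](const Vec&, const Vec&) -> SpMat {
        SpMat H(2, 2);
        H.insert(0, 0) = 2.0;
        H.insert(1, 1) = 2.0;
        return H;
      },
      // cₑ(x) = x₀ + x₁ − 2
      [](const Vec& x) -> Vec { return Vec::Constant(1, x[0] + x[1] - 2.0); },
      // ∂cₑ/∂x
      [](const Vec&) -> SpMat {
        SpMat A(1, 2);
        A.insert(0, 0) = 1.0;
        A.insert(0, 1) = 1.0;
        return A;
      }};

  // Initial guess; overwritten with the solution on exit
  Vec x(2);
  x << 0.0, 0.0;

  slp::Options options;

  // Empty span of iteration callbacks
  auto status = slp::sqp<double>(callbacks, {}, options, x);
  return status == slp::ExitStatus::SUCCESS ? 0 : 1;
}

On success the status is slp::ExitStatus::SUCCESS and x converges to approximately [0.5, 1.5], the minimizer of the constrained problem.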