CoDiPack 2.2.0 - A Code Differentiation Package
SciComp, TU Kaiserslautern
algorithms.hpp
/*
 * CoDiPack, a Code Differentiation Package
 *
 * Copyright (C) 2015-2024 Chair for Scientific Computing (SciComp), University of Kaiserslautern-Landau
 * Homepage: http://www.scicomp.uni-kl.de
 * Contact:  Prof. Nicolas R. Gauger (codi@scicomp.uni-kl.de)
 *
 * Lead developers: Max Sagebaum, Johannes Blühdorn (SciComp, University of Kaiserslautern-Landau)
 *
 * This file is part of CoDiPack (http://www.scicomp.uni-kl.de/software/codi).
 *
 * CoDiPack is free software: you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * CoDiPack is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 *
 * See the GNU General Public License for more details.
 * You should have received a copy of the GNU
 * General Public License along with CoDiPack.
 * If not, see <http://www.gnu.org/licenses/>.
 *
 * For other licensing options please contact us.
 *
 * Authors:
 *  - SciComp, University of Kaiserslautern-Landau:
 *    - Max Sagebaum
 *    - Johannes Blühdorn
 *  - Former members:
 *    - Tim Albring
 */
#pragma once

#include "../config.h"
#include "../expressions/lhsExpressionInterface.hpp"
#include "../misc/exceptions.hpp"
#include "../tapes/misc/tapeParameters.hpp"
#include "../traits/gradientTraits.hpp"
#include "data/dummy.hpp"
#include "data/jacobian.hpp"
#include "data/staticDummy.hpp"

namespace codi {

  /// Basic algorithms for tape evaluation in CoDiPack.
  ///
  /// @tparam T_Type          A CoDiPack active type on whose tape the algorithms operate.
  /// @tparam T_ActiveChecks  If true, identifiers are checked for activity before gradients are set.
  template<typename T_Type, bool T_ActiveChecks = true>
  struct Algorithms {
    public:

      using Type = CODI_DD(T_Type, CODI_DEFAULT_LHS_EXPRESSION);  ///< See Algorithms.

      static bool constexpr ActiveChecks = T_ActiveChecks;  ///< See Algorithms.

      using Tape = typename Type::Tape;              ///< See LhsExpressionInterface.
      using Position = typename Tape::Position;      ///< See LhsExpressionInterface.
      using Real = typename Type::Real;              ///< See LhsExpressionInterface.
      using Identifier = typename Type::Identifier;  ///< See LhsExpressionInterface.
      using Gradient = typename Type::Gradient;      ///< See LhsExpressionInterface.
      using GT = GradientTraits::TraitsImplementation<Gradient>;  ///< Shortcut for traits of gradient.

      /// Evaluation modes for the derivative computation.
      enum class EvaluationType {
        Forward,
        Reverse
      };

      /// Chooses the evaluation mode: Forward if there are at most as many inputs as
      /// outputs, Reverse otherwise.
      static CODI_INLINE EvaluationType getEvaluationChoice(size_t const inputs, size_t const outputs) {
        if (inputs <= outputs) {
          return EvaluationType::Forward;
        } else {
          return EvaluationType::Reverse;
        }
      }

      /// Compute the Jacobian with multiple tape sweeps.
      template<typename Jac, bool keepState = true>
      static CODI_INLINE void computeJacobian(Tape& tape, Position const& start, Position const& end,
                                              Identifier const* input, size_t const inputSize,
                                              Identifier const* output, size_t const outputSize, Jac& jac,
                                              AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        size_t constexpr gradDim = GT::dim;

        // Internally, automatic management is implemented in an optimized way that uses manual management.
        if (AdjointsManagement::Automatic == adjointsManagement) {
          tape.resizeAdjointVector();
          tape.beginUseAdjointVector();
        }

        EvaluationType evalType = getEvaluationChoice(inputSize, outputSize);
        if (EvaluationType::Forward == evalType) {
          for (size_t j = 0; j < inputSize; j += gradDim) {
            setGradientOnIdentifier(tape, j, input, inputSize, typename GT::Real(1.0), AdjointsManagement::Manual);

            if (keepState) {
              tape.evaluateForwardKeepState(start, end, AdjointsManagement::Manual);
            } else {
              tape.evaluateForward(start, end, AdjointsManagement::Manual);
            }

            for (size_t i = 0; i < outputSize; i += 1) {
              for (size_t curDim = 0; curDim < gradDim && j + curDim < inputSize; curDim += 1) {
                jac(outputSize - i - 1, j + curDim) =
                    GT::at(tape.getGradient(output[outputSize - i - 1], AdjointsManagement::Manual), curDim);
                // Reset the output gradient, but only for active (nonzero) identifiers.
                if (Identifier() != output[outputSize - i - 1]) {
                  GT::at(tape.gradient(output[outputSize - i - 1], AdjointsManagement::Manual), curDim) =
                      typename GT::Real();
                }
              }
            }

            setGradientOnIdentifier(tape, j, input, inputSize, typename GT::Real(), AdjointsManagement::Manual);
          }

          tape.clearAdjoints(end, start, AdjointsManagement::Manual);

        } else if (EvaluationType::Reverse == evalType) {
          for (size_t i = 0; i < outputSize; i += gradDim) {
            setGradientOnIdentifier(tape, i, output, outputSize, typename GT::Real(1.0), AdjointsManagement::Manual);

            if (keepState) {
              tape.evaluateKeepState(end, start, AdjointsManagement::Manual);
            } else {
              tape.evaluate(end, start, AdjointsManagement::Manual);
            }

            for (size_t j = 0; j < inputSize; j += 1) {
              for (size_t curDim = 0; curDim < gradDim && i + curDim < outputSize; curDim += 1) {
                jac(i + curDim, j) = GT::at(tape.getGradient(input[j], AdjointsManagement::Manual), curDim);
                GT::at(tape.gradient(input[j], AdjointsManagement::Manual), curDim) = typename GT::Real();
              }
            }

            setGradientOnIdentifier(tape, i, output, outputSize, typename GT::Real(), AdjointsManagement::Manual);

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearAdjoints(end, start, AdjointsManagement::Manual);
            }
          }
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }

        if (AdjointsManagement::Automatic == adjointsManagement) {
          tape.endUseAdjointVector();
        }
      }

      /// Compute the Jacobian with multiple tape sweeps. This method uses the
      /// global tape for the Jacobian evaluation.
      template<typename Jac>
      static CODI_INLINE void computeJacobian(Position const& start, Position const& end, Identifier const* input,
                                              size_t const inputSize, Identifier const* output,
                                              size_t const outputSize, Jac& jac,
                                              AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        computeJacobian(Type::getTape(), start, end, input, inputSize, output, outputSize, jac, adjointsManagement);
      }
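
      // Usage sketch (illustrative, not part of the original file). Assumes a
      // tape recorded for y = f(x) with codi::RealReverse and identifier arrays
      // gathered from the registered inputs and outputs via getIdentifier():
      //
      //   using Real = codi::RealReverse;
      //   Real::Tape& tape = Real::getTape();
      //   std::vector<Real::Identifier> in = ...;   // x[i].getIdentifier()
      //   std::vector<Real::Identifier> out = ...;  // y[i].getIdentifier()
      //   codi::Jacobian<double> jac(out.size(), in.size());
      //   codi::Algorithms<Real>::computeJacobian(tape.getZeroPosition(), tape.getPosition(),
      //                                           in.data(), in.size(),
      //                                           out.data(), out.size(), jac);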

      /// Compute the Hessian with multiple tape sweeps.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTape(Tape& tape, Position const& start, Position const& end,
                                                            Identifier const* input, size_t const inputSize,
                                                            Identifier const* output, size_t const outputSize,
                                                            Hes& hes, Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        EvaluationType evalType = getEvaluationChoice(inputSize, outputSize);
        if (EvaluationType::Forward == evalType) {
          computeHessianPrimalValueTapeForward(tape, start, end, input, inputSize, output, outputSize, hes, jac);
        } else if (EvaluationType::Reverse == evalType) {
          computeHessianPrimalValueTapeReverse(tape, start, end, input, inputSize, output, outputSize, hes, jac);
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }
      }

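      // Usage sketch (illustrative, not part of the original file). The tape has
      // to be a primal value tape recorded for a second-order type; assuming the
      // generalized type aliases from the CoDiPack headers:
      //
      //   using t1s = codi::RealForwardGen<double>;
      //   using r2s = codi::RealReversePrimalGen<t1s>;
      //   codi::Hessian<double> hes(out.size(), in.size());
      //   codi::Algorithms<r2s>::computeHessianPrimalValueTape(
      //       tape, tape.getZeroPosition(), tape.getPosition(),
      //       in.data(), in.size(), out.data(), out.size(), hes);
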
      /// Forward version of the Hessian computation.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTapeForward(Tape& tape, Position const& start,
                                                                   Position const& end, Identifier const* input,
                                                                   size_t const inputSize, Identifier const* output,
                                                                   size_t const outputSize, Hes& hes,
                                                                   Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        // Assume that the tape was just recorded.
        tape.revertPrimals(start);

        for (size_t j = 0; j < inputSize; j += gradDim2nd) {
          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real(1.0));

          // Starting at k = j is safe; it merely evaluates a few extra elements around the diagonal.
          for (size_t k = j; k < inputSize; k += gradDim1st) {
            setGradientOnIdentifier(tape, k, input, inputSize, typename GT1st::Real(1.0));

            tape.evaluateForward(start, end);

            for (size_t i = 0; i < outputSize; i += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && k + vecPos1st < inputSize; vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < inputSize; vecPos2nd += 1) {
                  hes(i, j + vecPos2nd, k + vecPos1st) =
                      GT2nd::at(GT1st::at(tape.getGradient(output[i]), vecPos1st).gradient(), vecPos2nd);
                  hes(i, k + vecPos1st, j + vecPos2nd) = hes(i, j + vecPos2nd, k + vecPos1st);  // Symmetry
                }
              }

              if (j == 0) {
                for (size_t vecPos = 0; vecPos < gradDim1st && k + vecPos < inputSize; vecPos += 1) {
                  jac(i, k + vecPos) = GT1st::at(tape.getGradient(output[i]), vecPos).value();
                }
              }
            }

            setGradientOnIdentifier(tape, k, input, inputSize, typename GT1st::Real());
          }

          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real());
        }
      }
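
      // Relation used above (illustrative): seeding the second-order dot of x_j
      // and the first-order dot of x_k, then running one forward sweep, produces
      //
      //   hes(i, j, k) = d^2 y_i / (dx_j dx_k)
      //
      // in the mixed second-order part of the output gradient; the first-order
      // part yields the Jacobian entries as a by-product.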

      /// Reverse version of the Hessian computation.
      template<typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianPrimalValueTapeReverse(Tape& tape, Position const& start,
                                                                   Position const& end, Identifier const* input,
                                                                   size_t const inputSize, Identifier const* output,
                                                                   size_t const outputSize, Hes& hes,
                                                                   Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        // Assume that the tape was just recorded.
        tape.revertPrimals(start);

        for (size_t j = 0; j < inputSize; j += gradDim2nd) {
          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          tape.evaluatePrimal(start, end);

          for (size_t i = 0; i < outputSize; i += gradDim1st) {
            setGradientOnIdentifier(tape, i, output, outputSize, typename GT1st::Real(1.0));

            // Propagate the derivatives backward for second order derivatives.
            tape.evaluateKeepState(end, start);

            for (size_t k = 0; k < inputSize; k += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < outputSize; vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < inputSize; vecPos2nd += 1) {
                  hes(i + vecPos1st, j + vecPos2nd, k) =
                      GT2nd::at(GT1st::at(tape.gradient(input[k]), vecPos1st).gradient(), vecPos2nd);
                }
              }

              if (j == 0) {
                for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < outputSize; vecPos1st += 1) {
                  jac(i + vecPos1st, k) = GT1st::at(tape.getGradient(input[k]), vecPos1st).value();
                }
              }

              tape.gradient(input[k]) = Gradient();
            }

            setGradientOnIdentifier(tape, i, output, outputSize, typename GT1st::Real());

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearAdjoints(end, start);
            }
          }

          setGradient2ndOnIdentifier(tape, j, input, inputSize, typename GT2nd::Real());

          if (j + gradDim2nd < inputSize) {
            tape.revertPrimals(start);
          }
        }
      }

      /// Compute the Hessian with multiple tape recordings and sweeps.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessian(Func func, VecIn& input, VecOut& output, Hes& hes,
                                             Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        EvaluationType evalType = getEvaluationChoice(input.size(), output.size());
        if (EvaluationType::Forward == evalType) {
          computeHessianForward(func, input, output, hes, jac);
        } else if (EvaluationType::Reverse == evalType) {
          computeHessianReverse(func, input, output, hes, jac);
        } else {
          CODI_EXCEPTION("Evaluation mode not implemented. Mode is: %d.", (int)evalType);
        }
      }

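      // Usage sketch (illustrative, not part of the original file), assuming the
      // generalized second-order type aliases from the CoDiPack headers:
      //
      //   using t1s = codi::RealForwardGen<double>;
      //   using r2s = codi::RealReverseGen<t1s>;
      //
      //   auto func = [](std::vector<r2s>& x, std::vector<r2s>& y) { y[0] = x[0] * x[1]; };
      //   std::vector<r2s> x(2), y(1);
      //   x[0] = 3.0; x[1] = 4.0;
      //   codi::Hessian<double> hes(1, 2);
      //   codi::Algorithms<r2s>::computeHessian(func, x, y, hes);
      //   // hes(0, 0, 1) == 1.0 for y = x0 * x1
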
      /// Forward version of the Hessian computation with a function object.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianForward(Func func, VecIn& input, VecOut& output, Hes& hes,
                                                    Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        Tape& tape = Type::getTape();

        for (size_t j = 0; j < input.size(); j += gradDim2nd) {
          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          recordTape(func, input, output);

          // Starting at k = j is safe; it merely evaluates a few extra elements around the diagonal.
          for (size_t k = j; k < input.size(); k += gradDim1st) {
            setGradientOnCoDiValue(tape, k, input.data(), input.size(), typename GT1st::Real(1.0));

            // Propagate the derivatives forward for second order derivatives.
            tape.evaluateForwardKeepState(tape.getZeroPosition(), tape.getPosition());

            for (size_t i = 0; i < output.size(); i += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && k + vecPos1st < input.size(); vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < input.size(); vecPos2nd += 1) {
                  hes(i, j + vecPos2nd, k + vecPos1st) = GT2nd::at(
                      GT1st::at(tape.getGradient(output[i].getIdentifier()), vecPos1st).gradient(), vecPos2nd);
                  hes(i, k + vecPos1st, j + vecPos2nd) = hes(i, j + vecPos2nd, k + vecPos1st);  // Symmetry
                }
              }

              if (j == 0) {
                for (size_t vecPos = 0; vecPos < gradDim1st && k + vecPos < input.size(); vecPos += 1) {
                  jac(i, k + vecPos) = GT1st::at(tape.getGradient(output[i].getIdentifier()), vecPos).value();
                }
              }
            }

            setGradientOnCoDiValue(tape, k, input.data(), input.size(), typename GT1st::Real());
          }

          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real());

          tape.reset();
        }
      }

      /// Reverse version of the Hessian computation with a function object.
      template<typename Func, typename VecIn, typename VecOut, typename Hes, typename Jac = DummyJacobian>
      static CODI_INLINE void computeHessianReverse(Func func, VecIn& input, VecOut& output, Hes& hes,
                                                    Jac& jac = StaticDummy<DummyJacobian>::dummy) {
        using GT1st = GT;
        size_t constexpr gradDim1st = GT1st::dim;
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        Tape& tape = Type::getTape();

        for (size_t j = 0; j < input.size(); j += gradDim2nd) {
          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real(1.0));

          // Propagate the new derivative information.
          recordTape(func, input, output);

          for (size_t i = 0; i < output.size(); i += gradDim1st) {
            setGradientOnCoDiValue(tape, i, output.data(), output.size(), typename GT1st::Real(1.0));

            // Propagate the derivatives backward for second order derivatives.
            tape.evaluateKeepState(tape.getPosition(), tape.getZeroPosition());

            for (size_t k = 0; k < input.size(); k += 1) {
              for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < output.size(); vecPos1st += 1) {
                for (size_t vecPos2nd = 0; vecPos2nd < gradDim2nd && j + vecPos2nd < input.size(); vecPos2nd += 1) {
                  hes(i + vecPos1st, j + vecPos2nd, k) =
                      GT2nd::at(GT1st::at(tape.gradient(input[k].getIdentifier()), vecPos1st).gradient(), vecPos2nd);
                }
              }

              if (j == 0) {
                for (size_t vecPos1st = 0; vecPos1st < gradDim1st && i + vecPos1st < output.size(); vecPos1st += 1) {
                  jac(i + vecPos1st, k) = GT1st::at(tape.getGradient(input[k].getIdentifier()), vecPos1st).value();
                }
              }

              tape.gradient(input[k].getIdentifier()) = Gradient();
            }

            setGradientOnCoDiValue(tape, i, output.data(), output.size(), typename GT1st::Real());

            if (!Config::ReversalZeroesAdjoints) {
              tape.clearAdjoints(tape.getPosition(), tape.getZeroPosition());
            }
          }

          setGradient2ndOnCoDiValue(j, input.data(), input.size(), typename GT2nd::Real());

          tape.reset();
        }
      }

    private:

      /// Sets the given value on the gradient of up to GT::dim consecutive identifiers,
      /// one vector dimension per identifier.
      template<typename T>
      static CODI_INLINE void setGradientOnIdentifier(
          Tape& tape, size_t const pos, Identifier const* identifiers, size_t const size, T value,
          AdjointsManagement adjointsManagement = AdjointsManagement::Automatic) {
        size_t constexpr gradDim = GT::dim;

        for (size_t curDim = 0; curDim < gradDim && pos + curDim < size; curDim += 1) {
          if (CODI_ENABLE_CHECK(ActiveChecks, 0 != identifiers[pos + curDim])) {
            GT::at(tape.gradient(identifiers[pos + curDim], adjointsManagement), curDim) = value;
          }
        }
      }
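
      // Note (illustrative): in vector mode, e.g. codi::RealReverseVec<4> with
      // GT::dim == 4, one call seeds up to four consecutive identifiers, one per
      // vector dimension, so a single sweep evaluates four directions at once.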

      /// Sets the given value on the second-order gradient (the gradient of the
      /// primal value) of up to GT2nd::dim consecutive identifiers.
      template<typename T>
      static CODI_INLINE void setGradient2ndOnIdentifier(Tape& tape, size_t const pos, Identifier const* identifiers,
                                                         size_t const size, T value) {
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        for (size_t curDim = 0; curDim < gradDim2nd && pos + curDim < size; curDim += 1) {
          // No activity check on the identifier required since forward types are used.
          GT2nd::at(tape.primal(identifiers[pos + curDim]).gradient(), curDim) = value;
        }
      }

      /// Sets the given value on the gradient of up to GT::dim consecutive CoDiPack
      /// values, one vector dimension per value.
      template<typename T>
      static CODI_INLINE void setGradientOnCoDiValue(Tape& tape, size_t const pos, Type* identifiers,
                                                     size_t const size, T value) {
        size_t constexpr gradDim = GT::dim;

        for (size_t curDim = 0; curDim < gradDim && pos + curDim < size; curDim += 1) {
          if (CODI_ENABLE_CHECK(ActiveChecks, 0 != identifiers[pos + curDim].getIdentifier())) {
            GT::at(tape.gradient(identifiers[pos + curDim].getIdentifier()), curDim) = value;
          }
        }
      }

      /// Sets the given value on the second-order gradient of up to GT2nd::dim
      /// consecutive CoDiPack values.
      template<typename T>
      static CODI_INLINE void setGradient2ndOnCoDiValue(size_t const pos, Type* identifiers, size_t const size,
                                                        T value) {
        using GT2nd = GradientTraits::TraitsImplementation<CODI_DD(typename Real::Gradient, double)>;
        size_t constexpr gradDim2nd = GT2nd::dim;

        for (size_t curDim = 0; curDim < gradDim2nd && pos + curDim < size; curDim += 1) {
          // No activity check on the identifier required since forward types are used.
          GT2nd::at(identifiers[pos + curDim].value().gradient(), curDim) = value;
        }
      }

      /// Records a tape for the evaluation of func: registers the inputs,
      /// evaluates func, and registers the outputs.
      template<typename Func, typename VecIn, typename VecOut>
      static CODI_INLINE void recordTape(Func func, VecIn& input, VecOut& output) {
        Tape& tape = Type::getTape();
        tape.setActive();
        for (size_t curIn = 0; curIn < input.size(); curIn += 1) {
          tape.registerInput(input[curIn]);
        }

        func(input, output);

        for (size_t curOut = 0; curOut < output.size(); curOut += 1) {
          tape.registerOutput(output[curOut]);
        }
        tape.setPassive();
      }
  };
}