/** @file
Utilities that relate simultaneously to both autodiff matrices and
gradient matrices. */

#pragma once

#include <algorithm>
#include <optional>

#include <Eigen/Dense>
#include <fmt/format.h>

#include "drake/common/drake_assert.h"
#include "drake/common/unused.h"
#include "drake/math/autodiff.h"
#include "drake/math/gradient.h"

namespace drake {
namespace math {

/** Extracts the `derivatives()` portion from a matrix of AutoDiffScalar
entries. (Each entry contains a value and derivatives.)
@param auto_diff_matrix An object whose Eigen type represents a matrix of
AutoDiffScalar entries.
@param num_derivatives (Optional) The number of derivatives to return in case
the input matrix has none, which we interpret as `num_derivatives` zeroes.
If `num_derivatives` is supplied and the input matrix has derivatives, the
sizes must match.
@retval gradient_matrix An Eigen::Matrix with number of rows equal to the
total size (rows x cols) of the input matrix and number of columns equal
to the number of derivatives. Output row `r + c * rows` holds the
derivatives of input entry (r, c), i.e. rows are ordered by the input's
column-major linear index.
@tparam Derived An Eigen type representing a matrix with AutoDiffScalar
entries. The type will be inferred from the type of the `auto_diff_matrix`
parameter at the call site.
@throws std::exception if the input matrix has elements with inconsistent,
non-zero numbers of derivatives.
@throws std::exception if `num_derivatives` is specified but the input matrix
has a different, non-zero number of derivatives.*/
template <typename Derived>
Eigen::Matrix<typename Derived::Scalar::Scalar, Derived::SizeAtCompileTime,
Eigen::Dynamic>
ExtractGradient(const Eigen::MatrixBase<Derived>& auto_diff_matrix,
std::optional<int> num_derivatives = {}) {
// Entries in an AutoDiff matrix must all have the same number of derivatives,
// or 0-length derivatives in which case they are interpreted as all-zero.
int num_derivatives_from_matrix = 0;
for (int i = 0; i < auto_diff_matrix.size(); ++i) {
const int entry_num_derivs =
static_cast<int>(auto_diff_matrix(i).derivatives().size());
if (entry_num_derivs == 0) continue; // Always OK.
if (num_derivatives_from_matrix != 0 &&
entry_num_derivs != num_derivatives_from_matrix) {
throw std::logic_error(fmt::format(
"ExtractGradient(): Input matrix has elements with inconsistent,"
" non-zero numbers of derivatives ({} and {}).",
num_derivatives_from_matrix, entry_num_derivs));
}
num_derivatives_from_matrix = entry_num_derivs;
}
if (!num_derivatives.has_value()) {
num_derivatives = num_derivatives_from_matrix;
} else if (num_derivatives_from_matrix != 0 &&
num_derivatives_from_matrix != *num_derivatives) {
throw std::logic_error(fmt::format(
"ExtractGradient(): Input matrix has {} derivatives, but"
" num_derivatives was specified as {}. Either the input matrix should"
" have zero derivatives, or the number should match num_derivatives.",
num_derivatives_from_matrix, *num_derivatives));
}
Eigen::Matrix<typename Derived::Scalar::Scalar, Derived::SizeAtCompileTime,
Eigen::Dynamic>
gradient(auto_diff_matrix.size(), *num_derivatives);
if (gradient.size() == 0) {
return gradient;
}
for (int row = 0; row < auto_diff_matrix.rows(); ++row) {
for (int col = 0; col < auto_diff_matrix.cols(); ++col) {
auto gradient_row =
gradient.row(row + col * auto_diff_matrix.rows()).transpose();
if (auto_diff_matrix(row, col).derivatives().size() == 0) {
gradient_row.setZero();
} else {
gradient_row = auto_diff_matrix(row, col).derivatives();
}
}
}
return gradient;
}
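
// Illustrative usage sketch, not part of the original header. It assumes
// drake::AutoDiffXd and the VectorX<> alias from drake/common/eigen_types.h,
// plus the value-only InitializeAutoDiff() overload declared in
// drake/math/autodiff.h; variable names are hypothetical.
//
//   const Eigen::Vector2d x(1.0, 2.0);
//   const VectorX<AutoDiffXd> x_ad = InitializeAutoDiff(x);
//   const Eigen::MatrixXd jacobian = ExtractGradient(x_ad);  // 2x2 identity.
//
//   // Entries with empty derivatives produce zeros of the requested width.
//   const VectorX<AutoDiffXd> c = x.cast<AutoDiffXd>();
//   const Eigen::MatrixXd zeros = ExtractGradient(c, 5);     // 2x5 zeros.
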
/** Initializes an AutoDiff matrix given a matrix of values and a gradient
matrix.
@param[in] value The value matrix. Will be accessed with a single index.
@param[in] gradient The gradient matrix. The number of rows must match the
total size (nrow x ncol) of the value matrix. Derivatives of value(j) should
be stored in row j of the gradient matrix.
@param[out] auto_diff_matrix The matrix of AutoDiffScalars. Will be resized as
needed to have the same dimensions as the value matrix.
@exclude_from_pydrake_mkdoc{Not bound in pydrake.} */
template <typename DerivedValue, typename DerivedGradient,
typename DerivedAutoDiff>
void InitializeAutoDiff(
const Eigen::MatrixBase<DerivedValue>& value,
const Eigen::MatrixBase<DerivedGradient>& gradient,
Eigen::MatrixBase<DerivedAutoDiff>* auto_diff_matrix) {
// Any fixed-size dimensions of the parameters must be consistent in
// corresponding dimensions. Any dynamic-size dimensions must be dynamic
// in any corresponding dimensions.
static_assert(static_cast<int>(DerivedValue::SizeAtCompileTime) ==
static_cast<int>(DerivedGradient::RowsAtCompileTime),
"gradient has wrong number of rows at compile time");
using ExpectedAutoDiffType =
AutoDiffMatrixType<DerivedValue, DerivedGradient::ColsAtCompileTime>;
static_assert(static_cast<int>(ExpectedAutoDiffType::RowsAtCompileTime) ==
static_cast<int>(DerivedAutoDiff::RowsAtCompileTime),
"auto diff matrix has wrong number of rows at compile time");
static_assert(static_cast<int>(ExpectedAutoDiffType::ColsAtCompileTime) ==
static_cast<int>(DerivedAutoDiff::ColsAtCompileTime),
"auto diff matrix has wrong number of columns at compile time");
// Verify that the Scalar types of the Value matrix and AutoDiff result
// are the same.
static_assert(std::is_same_v<typename DerivedAutoDiff::Scalar,
typename ExpectedAutoDiffType::Scalar>,
"wrong auto diff scalar type");
DRAKE_DEMAND(auto_diff_matrix != nullptr);
DRAKE_DEMAND(value.size() == gradient.rows() &&
"gradient has wrong number of rows at runtime");
using Index = typename Eigen::MatrixBase<DerivedGradient>::Index;
// Can't resize() a MatrixBase -- downcast to the actual type.
DerivedAutoDiff& auto_diff = *static_cast<DerivedAutoDiff*>(auto_diff_matrix);
auto_diff.resize(value.rows(), value.cols());
auto num_derivs = gradient.cols();
for (Index row = 0; row < auto_diff.size(); ++row) {
auto_diff(row).value() = value(row);
auto_diff(row).derivatives().resize(num_derivs, 1);
auto_diff(row).derivatives() = gradient.row(row).transpose();
}
}
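
// Illustrative usage sketch, not part of the original header (variable names
// are hypothetical). The compile-time dimensions of all three arguments must
// be mutually consistent, so this example keeps everything dynamically sized.
//
//   Eigen::VectorXd value(2);
//   value << 1.0, 2.0;
//   Eigen::MatrixXd gradient(2, 3);
//   gradient << 1.0, 0.0, 0.0,
//               0.0, 1.0, 0.0;
//   VectorX<AutoDiffXd> value_ad;
//   InitializeAutoDiff(value, gradient, &value_ad);
//   // Now value_ad(0).value() == 1.0 and value_ad(0).derivatives() is
//   // the row [1, 0, 0] transposed.
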
/** Returns an AutoDiff matrix given a matrix of values and a gradient
matrix.
@param[in] value The value matrix. Will be accessed with a single index.
@param[in] gradient The gradient matrix. The number of rows must match the
total size (nrow x ncol) of the value matrix. Derivatives of value(j) should
be stored in row j of the gradient matrix.
@retval auto_diff_matrix The matrix of AutoDiffScalars. Will have the same
dimensions as the value matrix.
@pydrake_mkdoc_identifier{value_and_gradient} */
template <typename DerivedValue, typename DerivedGradient>
AutoDiffMatrixType<DerivedValue, DerivedGradient::ColsAtCompileTime>
InitializeAutoDiff(
const Eigen::MatrixBase<DerivedValue>& value,
const Eigen::MatrixBase<DerivedGradient>& gradient) {
AutoDiffMatrixType<DerivedValue, DerivedGradient::ColsAtCompileTime>
auto_diff_matrix(value.rows(), value.cols());
InitializeAutoDiff(value, gradient, &auto_diff_matrix);
return auto_diff_matrix;
}
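
// Illustrative sketch, not part of the original header: with the same `value`
// and `gradient` as in the sketch above, the returning overload sizes and
// types the AutoDiff matrix automatically.
//
//   const VectorX<AutoDiffXd> value_ad = InitializeAutoDiff(value, gradient);
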
/** `B = DiscardZeroGradient(A, precision)` enables casting from a matrix of
AutoDiffScalars to AutoDiffScalar::Scalar type, but first checking that
the gradient matrix is empty or zero. For a matrix `A` of type, e.g.,
`MatrixX<AutoDiffXd>`, the comparable operation
`B = A.cast<double>()`
should (and does) fail to compile. Use `DiscardZeroGradient(A)` if you want
to force the cast (and the check).
This method is overloaded to permit the user to call it for double types and
AutoDiffScalar types (to avoid the calling function having to handle the
two cases differently).
See ExtractValue() for a note on similar Drake functions.
@param precision is passed to Eigen's isZero(precision) to evaluate whether
the gradients are zero.
@throws std::exception if the gradients are neither empty nor zero.
@see DiscardGradient() */
template <typename Derived>
typename std::enable_if_t<
!std::is_same_v<typename Derived::Scalar, double>,
Eigen::Matrix<typename Derived::Scalar::Scalar, Derived::RowsAtCompileTime,
Derived::ColsAtCompileTime, 0, Derived::MaxRowsAtCompileTime,
Derived::MaxColsAtCompileTime>>
DiscardZeroGradient(
const Eigen::MatrixBase<Derived>& auto_diff_matrix,
const typename Eigen::NumTraits<
typename Derived::Scalar::Scalar>::Real& precision =
Eigen::NumTraits<typename Derived::Scalar::Scalar>::dummy_precision()) {
const auto gradients = ExtractGradient(auto_diff_matrix);
if (gradients.size() == 0 || gradients.isZero(precision)) {
return ExtractValue(auto_diff_matrix);
}
throw std::runtime_error(
"Casting AutoDiff to value but gradients are not zero.");
}
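
// Illustrative usage sketch, not part of the original header (variable names
// are hypothetical). Entries whose derivatives are empty or all zero narrow
// cleanly back to double; any non-zero derivative makes the call throw.
//
//   const MatrixX<AutoDiffXd> A =
//       Eigen::Matrix2d::Identity().cast<AutoDiffXd>();
//   const Eigen::MatrixXd B = DiscardZeroGradient(A);  // OK: no derivatives.
//
//   const VectorX<AutoDiffXd> x_ad = InitializeAutoDiff(
//       Eigen::VectorXd::Ones(2), Eigen::MatrixXd::Identity(2, 2));
//   // DiscardZeroGradient(x_ad) would throw: its gradient is a non-zero
//   // identity matrix.
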
/** @see DiscardZeroGradient(). */
template <typename Derived>
typename std::enable_if_t<std::is_same_v<typename Derived::Scalar, double>,
const Eigen::MatrixBase<Derived>&>
DiscardZeroGradient(const Eigen::MatrixBase<Derived>& matrix,
double precision = 0.) {
unused(precision);
return matrix;
}
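
// Illustrative sketch, not part of the original header: because of the double
// overload above, scalar-generic code can call DiscardZeroGradient()
// unconditionally. The helper below is hypothetical.
//
//   template <typename T>
//   Eigen::MatrixXd ToDoubleChecked(const MatrixX<T>& M) {
//     return DiscardZeroGradient(M);  // Pass-through when T is double.
//   }
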
/** Given a matrix of AutoDiffScalars, returns the size of the derivatives.
@throws std::exception if some entry has a different (non-zero) number of
derivatives than the others. */
template <typename Derived>
typename std::enable_if_t<!std::is_same_v<typename Derived::Scalar, double>,
                          int>
GetDerivativeSize(const Eigen::MatrixBase<Derived>& A) {
int num_derivs = 0;
for (int i = 0; i < A.rows(); ++i) {
for (int j = 0; j < A.cols(); ++j) {
if (A(i, j).derivatives().size() != 0) {
if (num_derivs != 0 && A(i, j).derivatives().size() != num_derivs) {
throw std::runtime_error(fmt::format(
"GetDerivativeSize(): A({}, {}).derivatives() has size "
"{}, while another entry has size {}",
i, j, A(i, j).derivatives().size(), num_derivs));
}
num_derivs = A(i, j).derivatives().size();
}
}
}
return num_derivs;
}
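
// Illustrative usage sketch, not part of the original header (variable names
// are hypothetical). Entries with empty derivatives are ignored; the common
// non-zero size is returned, and mismatched non-zero sizes throw.
//
//   VectorX<AutoDiffXd> v(2);
//   v(0).value() = 1.0;
//   v(0).derivatives() = Eigen::Vector3d::UnitX();  // Three derivatives.
//   v(1).value() = 2.0;                             // Empty derivatives.
//   const int n = GetDerivativeSize(v);             // n == 3.
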
} // namespace math
} // namespace drake