differentiable_norm.h
#pragma once

#include <limits>
#include <type_traits>

#include <Eigen/Dense>

#include "drake/math/autodiff.h"
#include "drake/math/autodiff_gradient.h"

namespace drake {
namespace math {
/** The 2-norm function |x| is not differentiable at x = 0 (its gradient is
x/|x|, which has a division-by-zero problem), yet x = 0 occurs very often in
practice. Hence we return a subgradient, x/(|x| + ε), when x is almost 0, and
return the exact gradient, x/|x|, otherwise. */
template <typename Derived>
typename Derived::Scalar DifferentiableNorm(
    const Eigen::MatrixBase<Derived>& x) {
  // We only support column vectors for now.
  static_assert(Derived::ColsAtCompileTime == 1);
  const double kEps = std::numeric_limits<double>::epsilon();
  if constexpr (std::is_same_v<typename Derived::Scalar, AutoDiffXd>) {
    const Eigen::Matrix<double, Derived::RowsAtCompileTime,
                        Derived::ColsAtCompileTime>
        x_val = ExtractValue(x);
    const double norm_val = x_val.norm();
    if (norm_val > 100 * kEps) {
      // Far enough from zero: the exact gradient x/|x| is well defined.
      return x.norm();
    } else {
      // Near zero: return the value together with the smoothed gradient
      // x/(|x| + 10ε), which avoids the division by zero.
      return AutoDiffXd(norm_val, ExtractGradient(x).transpose() * x_val /
                                      (norm_val + 10 * kEps));
    }
  } else {
    return x.norm();
  }
}
} // namespace math
} // namespace drake
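
// Usage sketch (illustrative only, not part of this header): evaluating
// DifferentiableNorm on an AutoDiffXd vector whose value is exactly zero.
// A plain call to x.norm() would typically produce NaN derivatives here,
// whereas the smoothed gradient x/(|x| + 10ε) stays finite. The variable
// names below are assumptions made for this example.
//
//   drake::AutoDiffXd x0(0.0, Eigen::Vector2d::Unit(0));
//   drake::AutoDiffXd x1(0.0, Eigen::Vector2d::Unit(1));
//   const Eigen::Matrix<drake::AutoDiffXd, 2, 1> x(x0, x1);
//   const drake::AutoDiffXd n = drake::math::DifferentiableNorm(x);
//   // n.value() == 0.0 and n.derivatives() is the finite vector [0, 0],
//   // rather than the NaN derivatives that x.norm() would give at zero.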