Skip to content

Commit

Permalink
Add adaptive_lp_pool
Browse files Browse the repository at this point in the history
  • Loading branch information
seanmor5 committed Aug 28, 2021
1 parent 5431469 commit 92502e4
Show file tree
Hide file tree
Showing 5 changed files with 86 additions and 12 deletions.
17 changes: 14 additions & 3 deletions lib/axon.ex
Original file line number Diff line number Diff line change
Expand Up @@ -919,7 +919,8 @@ defmodule Axon do

@adaptive_pooling_layers [
{:adaptive_avg_pool, "Adaptive average pool", "an"},
{:adaptive_max_pool, "Adaptive max pool", "an"}
{:adaptive_max_pool, "Adaptive max pool", "an"},
{:adaptive_lp_pool, "Adaptive power average pool", "an"}
]

for {pool, name, a_or_an} <- @adaptive_pooling_layers do
Expand Down Expand Up @@ -955,7 +956,17 @@ defmodule Axon do
output_size = tuple_or_duplicate(:output_size, output_size, inner_rank)
output_shape = Axon.Shape.adaptive_pool(parent_shape, output_size)

layer(x, pool, output_shape, %{}, opts[:name], output_size: output_size)
name = opts[:name]

opts =
if pool == :adaptive_lp_pool do
norm = opts[:norm] || 2
[output_size: output_size, norm: norm]
else
[output_size: output_size]
end

layer(x, pool, output_shape, %{}, name, opts)
end

## Global Pooling
Expand Down Expand Up @@ -994,7 +1005,7 @@ defmodule Axon do

opts =
if pool == :global_lp_pool do
norm = opts[:norm] || 1
norm = opts[:norm] || 2
[keep_axes: keep_axes, norm: norm]
else
[keep_axes: keep_axes]
Expand Down
8 changes: 7 additions & 1 deletion lib/axon/compiler.ex
Original file line number Diff line number Diff line change
Expand Up @@ -335,7 +335,13 @@ defmodule Axon.Compiler do

## Pooling Layers

@pooling_layers [:max_pool, :avg_pool, :adaptive_avg_pool, :adaptive_max_pool] ++
@pooling_layers [
:max_pool,
:avg_pool,
:adaptive_avg_pool,
:adaptive_max_pool,
:adaptive_lp_pool
] ++
[:lp_pool, :global_lp_pool, :global_max_pool, :global_avg_pool]

defp recur_predict_fun(
Expand Down
69 changes: 63 additions & 6 deletions lib/axon/layers.ex
Original file line number Diff line number Diff line change
Expand Up @@ -403,7 +403,7 @@ defmodule Axon.Layers do
Depthwise convolutions apply a single convolutional filter to
each input channel. This is done by setting `feature_group_size`
equal to the number of input channels. This will split the
output_channels into `input_channels` number of groups and
`output_channels` into `input_channels` number of groups and
convolve the grouped kernel channels over the corresponding input
channel.
Expand Down Expand Up @@ -752,7 +752,9 @@ defmodule Axon.Layers do
## Options
* `kernel_size` - window size. Rank must match spatial dimension
* `:norm` - $p$ from above equation. Defaults to 2.
* `:kernel_size` - window size. Rank must match spatial dimension
of the input tensor. Required.
* `:strides` - kernel strides. Can be a scalar or a list
Expand Down Expand Up @@ -933,6 +935,61 @@ defmodule Axon.Layers do
|> Nx.window_max(window_dimensions, padding: :valid, strides: window_strides)
end

@doc ~S"""
Functional implementation of general dimensional adaptive power
average pooling.

Computes:

$$f(X) = \sqrt[p]{\sum_{x \in X} x^{p}}$$

Adaptive pooling allows you to specify the desired output size
of the transformed input. This will automatically adapt the
window size and strides to obtain the desired output size. It
will then perform power average pooling using the calculated
window size and strides.

Adaptive pooling can be useful when working on multiple inputs with
different spatial input shapes. You can guarantee the output of
an adaptive pooling operation is always the same size regardless
of input shape.

## Options

  * `:norm` - $p$ from above equation. Defaults to 2.

  * `:output_size` - spatial output size. Must be a tuple with
    size equal to the spatial dimensions in the input tensor.
    Required.
"""
@doc type: :pooling
defn adaptive_lp_pool(input, opts \\ []) do
  opts = keyword!(opts, [:output_size, norm: 2])

  norm = opts[:norm]

  # Derive strides from input/output spatial sizes (rank - 2 skips the
  # batch and channel dimensions), then derive the window size from
  # those strides so the output has exactly `:output_size` elements.
  window_strides =
    transform(
      {Nx.shape(input), Nx.rank(input), opts[:output_size]},
      fn {shape, rank, output_size} ->
        Axon.Shape.adaptive_pool_window_strides(shape, output_size, rank - 2)
      end
    )

  window_dimensions =
    transform(
      {Nx.shape(input), Nx.rank(input), window_strides, opts[:output_size]},
      fn {shape, rank, strides, output_size} ->
        Axon.Shape.adaptive_pool_window_size(shape, strides, output_size, rank - 2)
      end
    )

  # Raise to p, sum each adaptive window, then take the p-th root.
  # The exponent 1/p is built in the input's type to preserve dtype.
  input
  |> Nx.power(norm)
  |> Nx.window_sum(window_dimensions, padding: :valid, strides: window_strides)
  |> Nx.power(Nx.divide(Nx.tensor(1, type: Nx.type(input)), norm))
end

## Normalization

@doc ~S"""
Expand Down Expand Up @@ -1447,11 +1504,11 @@ defmodule Axon.Layers do
* `:keep_axes` - option to keep reduced axes with size 1 for each reduced
dimensions. Defaults to `false`
* `:norm` - $p$ in above function. Defaults to `1`
* `:norm` - $p$ in above function. Defaults to 2
## Examples
iex> Axon.Layers.global_lp_pool(Nx.iota({3, 2, 3}, type: {:f, 32}))
iex> Axon.Layers.global_lp_pool(Nx.iota({3, 2, 3}, type: {:f, 32}), norm: 1)
#Nx.Tensor<
f32[3][2]
[
Expand All @@ -1461,7 +1518,7 @@ defmodule Axon.Layers do
]
>
iex> Axon.Layers.global_lp_pool(Nx.iota({1, 3, 2, 2}, type: {:f, 16}), norm: 2, keep_axes: true)
iex> Axon.Layers.global_lp_pool(Nx.iota({1, 3, 2, 2}, type: {:f, 16}), keep_axes: true)
#Nx.Tensor<
f16[1][3][1][1]
[
Expand All @@ -1480,7 +1537,7 @@ defmodule Axon.Layers do
>
"""
defn global_lp_pool(x, opts \\ []) do
opts = keyword!(opts, norm: 1, keep_axes: false)
opts = keyword!(opts, norm: 2, keep_axes: false)

norm = opts[:norm]

Expand Down
2 changes: 1 addition & 1 deletion test/axon_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -513,7 +513,7 @@ defmodule AxonTest do
end
end

@adaptive_pooling_layers [:adaptive_avg_pool, :adaptive_max_pool]
@adaptive_pooling_layers [:adaptive_avg_pool, :adaptive_max_pool, :adaptive_lp_pool]

describe "adaptive pooling" do
test "works with options" do
Expand Down
2 changes: 1 addition & 1 deletion test/compiler_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -382,7 +382,7 @@ defmodule CompilerTest do
end
end

@adaptive_pooling_layers [:adaptive_avg_pool, :adaptive_max_pool]
@adaptive_pooling_layers [:adaptive_avg_pool, :adaptive_max_pool, :adaptive_lp_pool]

describe "adaptive pooling" do
test "initializes with no params" do
Expand Down

0 comments on commit 92502e4

Please sign in to comment.