[Doc][Org] move edge_softmax to functional (dmlc#2442)
* move edge_softmax to functional

* change examples

* fixes

* revert

* fix

* fix

* remove

* fix

* test

* add init

Co-authored-by: Minjie Wang <[email protected]>
BarclayII and jermainewang authored Jan 26, 2021
1 parent a613ad8 commit 6c23fba
Showing 18 changed files with 39 additions and 28 deletions.
14 changes: 0 additions & 14 deletions docs/source/api/python/dgl.ops.rst
@@ -236,20 +236,6 @@ The following is an example showing how GSDDMM works:

Like GSpMM, GSDDMM operators support both homogeneous and bipartite graphs.

Edge Softmax module
-------------------

DGL also provides a framework-agnostic edge softmax module, which is frequently used in
GNN-like structures, e.g.
`Graph Attention Network <https://arxiv.org/pdf/1710.10903.pdf>`_,
`Transformer <https://papers.nips.cc/paper/7181-attention-is-all-you-need.pdf>`_,
`Capsule <https://arxiv.org/pdf/1710.09829.pdf>`_, etc.

.. autosummary::
    :toctree: ../../generated/

    edge_softmax

Segment Reduce Module
---------------------

17 changes: 17 additions & 0 deletions docs/source/api/python/nn.rst
@@ -10,3 +10,20 @@ dgl.nn
nn.pytorch
nn.mxnet
nn.tensorflow

dgl.nn.functional
=================

Edge Softmax module
-------------------

We also provide a framework-agnostic edge softmax module, which is frequently used in
GNN-like structures, e.g.
`Graph Attention Network <https://arxiv.org/pdf/1710.10903.pdf>`_,
`Transformer <https://papers.nips.cc/paper/7181-attention-is-all-you-need.pdf>`_,
`Capsule <https://arxiv.org/pdf/1710.09829.pdf>`_, etc.

.. autosummary::
    :toctree: ../../generated/

    nn.functional.edge_softmax
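
For reference, a minimal usage sketch of the relocated function under its new path (an illustration, not part of this commit; the toy graph and tensor values are made up):

import torch
import dgl
from dgl.nn.functional import edge_softmax

g = dgl.graph(([0, 0, 1], [1, 2, 2]))    # toy graph with 3 edges
logits = torch.randn(g.num_edges(), 1)   # one unnormalized score per edge
attn = edge_softmax(g, logits)           # softmax over each destination node's incoming edges
print(attn.shape)                        # torch.Size([3, 1])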
2 changes: 1 addition & 1 deletion examples/pytorch/gat/gat.py
@@ -10,7 +10,7 @@
import torch
import torch.nn as nn
import dgl.function as fn
from dgl.nn.pytorch import edge_softmax, GATConv
from dgl.nn import GATConv


class GAT(nn.Module):
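
The example and NN-module changes above and below all follow the same one-line import migration; a hedged before/after sketch of the pattern (paths as shown in this commit's diffs):

# Before this commit (framework-specific or ops-level paths):
#   from dgl.nn.pytorch import edge_softmax
#   from dgl.ops import edge_softmax
# After this commit (a single framework-agnostic path):
from dgl.nn.functional import edge_softmax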
2 changes: 1 addition & 1 deletion examples/pytorch/graphwriter/modules.py
@@ -1,7 +1,7 @@
import torch
import math
import dgl.function as fn
from dgl.nn.pytorch import edge_softmax
from dgl.nn.functional import edge_softmax
from utlis import *
from torch import nn
import torch.nn.functional as F
2 changes: 1 addition & 1 deletion examples/pytorch/hgt/model.py
@@ -4,7 +4,7 @@
import torch.nn as nn
import torch.nn.functional as F
import dgl.function as fn
from dgl.ops import edge_softmax
from dgl.nn.functional import edge_softmax

class HGTLayer(nn.Module):
def __init__(self,
2 changes: 1 addition & 1 deletion examples/pytorch/ogb/ogbn-arxiv/models.py
@@ -4,7 +4,7 @@
from dgl import function as fn
from dgl._ffi.base import DGLError
from dgl.nn.pytorch.utils import Identity
from dgl.ops import edge_softmax
from dgl.nn.functional import edge_softmax
from dgl.utils import expand_as_pair


2 changes: 1 addition & 1 deletion examples/tensorflow/gat/gat.py
@@ -10,7 +10,7 @@
import tensorflow as tf
from tensorflow.keras import layers
import dgl.function as fn
from dgl.nn.tensorflow import edge_softmax, GATConv
from dgl.nn import GATConv


class GAT(tf.keras.Model):
5 changes: 5 additions & 0 deletions python/dgl/nn/__init__.py
@@ -16,6 +16,11 @@
import importlib
import sys
import os

# [BarclayII] Not sure what's going on with pylint.
# Possible issue: https://github.com/PyCQA/pylint/issues/2648
from . import functional # pylint: disable=import-self

from ..backend import backend_name
from ..utils import expand_as_pair

3 changes: 3 additions & 0 deletions python/dgl/nn/functional/__init__.py
@@ -0,0 +1,3 @@
"""Functions related to DGL NN Modules."""

from ...ops import edge_softmax
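
Because the new module only re-exports the existing op, both import paths resolve to the same function object; a quick hedged check (not included in the commit):

from dgl.ops import edge_softmax as ops_edge_softmax
from dgl.nn.functional import edge_softmax

assert edge_softmax is ops_edge_softmax  # same object, just re-exported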
2 changes: 1 addition & 1 deletion python/dgl/nn/mxnet/conv/agnnconv.py
@@ -4,7 +4,7 @@
from mxnet.gluon import nn

from .... import function as fn
from ....ops import edge_softmax
from ...functional import edge_softmax
from ..utils import normalize
from ....base import DGLError
from ....utils import expand_as_pair
2 changes: 1 addition & 1 deletion python/dgl/nn/mxnet/conv/gatconv.py
@@ -7,7 +7,7 @@

from .... import function as fn
from ....base import DGLError
from ....ops import edge_softmax
from ...functional import edge_softmax
from ....utils import expand_as_pair

#pylint: enable=W0235
2 changes: 1 addition & 1 deletion python/dgl/nn/mxnet/softmax.py
@@ -1,3 +1,3 @@
"""Gluon layer for graph related softmax."""
# pylint: disable= unused-import
from ...ops import edge_softmax
from ..functional import edge_softmax
2 changes: 1 addition & 1 deletion python/dgl/nn/pytorch/conv/agnnconv.py
@@ -5,7 +5,7 @@
from torch.nn import functional as F

from .... import function as fn
from ....ops import edge_softmax
from ...functional import edge_softmax
from ....base import DGLError
from ....utils import expand_as_pair

2 changes: 1 addition & 1 deletion python/dgl/nn/pytorch/conv/dotgatconv.py
@@ -3,7 +3,7 @@
from torch import nn

from .... import function as fn
from ....ops import edge_softmax
from ...functional import edge_softmax
from ....base import DGLError
from ....utils import expand_as_pair

2 changes: 1 addition & 1 deletion python/dgl/nn/pytorch/conv/gatconv.py
@@ -4,7 +4,7 @@
from torch import nn

from .... import function as fn
from ....ops import edge_softmax
from ...functional import edge_softmax
from ....base import DGLError
from ..utils import Identity
from ....utils import expand_as_pair
2 changes: 1 addition & 1 deletion python/dgl/nn/pytorch/softmax.py
@@ -1,3 +1,3 @@
"""Torch modules for graph related softmax."""
# pylint: disable= unused-import
from ...ops import edge_softmax
from ..functional import edge_softmax
2 changes: 1 addition & 1 deletion python/dgl/nn/tensorflow/conv/gatconv.py
@@ -6,7 +6,7 @@

from .... import function as fn
from ....base import DGLError
from ....ops import edge_softmax
from ...functional import edge_softmax
from ..utils import Identity

# pylint: enable=W0235
2 changes: 1 addition & 1 deletion python/dgl/nn/tensorflow/softmax.py
@@ -1,3 +1,3 @@
"""tf modules for graph related softmax."""
# pylint: disable= unused-import
from ...ops import edge_softmax
from ..functional import edge_softmax
