Skip to content

Commit

Permalink
adjust structure
Browse files Browse the repository at this point in the history
split models package, add tools package
  • Loading branch information
JunguangJiang committed Mar 9, 2020
1 parent 713af92 commit 7526f0a
Show file tree
Hide file tree
Showing 30 changed files with 526 additions and 427 deletions.
3 changes: 1 addition & 2 deletions dalib/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
from .models import *
from .datasets import *
__all__ = ['datasets', 'adaptation', 'vision', 'text']
1 change: 1 addition & 0 deletions dalib/adaptation/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
__all__ = ["cdan", "dann", "mdd", "dan", "afn"]
File renamed without changes.
31 changes: 16 additions & 15 deletions dalib/models/afn.py → dalib/adaptation/afn.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import math
import torch.nn as nn
from .classifier import Classifier as ClassifierBase
from dalib.vision.classifier import Classifier as ClassifierBase


class StepwiseAdaptiveFeatureNorm(nn.Module):
Expand Down Expand Up @@ -30,6 +30,21 @@ def forward(self, features):
return ((features.norm(p=2, dim=1) - radius) ** 2).mean()


class L2PreservedDropout(nn.Module):
    """Dropout that preserves the expected L2-norm of its input in both the
    training and evaluation phases.

    Standard :class:`nn.Dropout` rescales surviving activations by ``1/(1-p)``
    during training, which inflates the expected squared norm by ``1/(1-p)``.
    Multiplying the dropout output by ``sqrt(1-p)`` cancels that inflation, so
    the expected L2-norm of the features matches between train and eval mode.

    Args:
        *args, **kwargs: forwarded verbatim to :class:`nn.Dropout`
            (typically ``p``, the drop probability).
    """
    def __init__(self, *args, **kwargs):
        super(L2PreservedDropout, self).__init__()
        self.dropout = nn.Dropout(*args, **kwargs)

    def forward(self, input):
        output = self.dropout(input)
        if self.training:
            # In-place scale by sqrt(1-p); `p` is read at call time so later
            # changes to self.dropout.p take effect. In eval mode nn.Dropout
            # is the identity, so no correction is needed.
            output.mul_(math.sqrt(1 - self.dropout.p))
        return output


class Classifier(ClassifierBase):
def __init__(self, backbone, num_classes, bottleneck_dim=1000, dropout_p=0.5):
bottleneck = nn.Sequential(
Expand All @@ -48,17 +63,3 @@ def get_parameters(self):
]
return params


class L2PreservedDropout(nn.Module):
    """Dropout whose expected output L2-norm matches its input L2-norm in
    both training and evaluation mode.

    Wraps :class:`nn.Dropout` and, during training, scales its output by
    ``sqrt(1 - p)`` to undo the norm inflation caused by dropout's
    ``1/(1-p)`` rescaling of surviving activations.

    Args:
        *args, **kwargs: passed straight through to :class:`nn.Dropout`.
    """

    def __init__(self, *args, **kwargs):
        super(L2PreservedDropout, self).__init__()
        self.dropout = nn.Dropout(*args, **kwargs)

    def forward(self, input):
        dropped = self.dropout(input)
        if not self.training:
            # Eval mode: nn.Dropout is the identity, nothing to correct.
            return dropped
        # Read p at call time so assignments to self.dropout.p after
        # construction behave the same way they do for nn.Dropout itself.
        scale = math.sqrt(1 - self.dropout.p)
        return dropped * scale
4 changes: 2 additions & 2 deletions dalib/models/cdan.py → dalib/adaptation/cdan.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
import torch.nn as nn
import torch.nn.functional as F
from ._util import binary_accuracy, WarmStartGradientReverseLayer
from .classifier import Classifier as ClassifierBase
from dalib.vision.classifier import Classifier as ClassifierBase


__all__ = ['DomainDiscriminator', 'ConditionalDomainAdversarialLoss']


class DomainDiscriminator(nn.Module):
"""Domain discriminator model. See class:`dalib.models.dann.DomainDiscriminator` for details.
"""Domain discriminator model. See class:`dalib.adaptation.dann.DomainDiscriminator` for details.
"""
def __init__(self, in_feature, hidden_size):
super(DomainDiscriminator, self).__init__()
Expand Down
4 changes: 2 additions & 2 deletions dalib/models/dan.py → dalib/adaptation/dan.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import torch
import torch.nn as nn
from .classifier import Classifier as ClassifierBase
from dalib.vision.classifier import Classifier as ClassifierBase

__all__ = ['MultipleKernelMaximumMeanDiscrepancy', 'GaussianKernel']

Expand All @@ -22,7 +22,7 @@ class MultipleKernelMaximumMeanDiscrepancy(nn.Module):
>>> feature_dim = 1024
>>> batch_size = 10
>>> kernels = [GaussianKernel(alpha=0.5), GaussianKernel(1.), GaussianKernel(2.)]
>>> loss = models.dan.MultipleKernelMaximumMeanDiscrepancy(kernels)
>>> loss = adaptation.dan.MultipleKernelMaximumMeanDiscrepancy(kernels)
>>> # features from source domain and target domain
>>> f_s, f_t = torch.randn(batch_size, feature_dim), torch.randn(batch_size, feature_dim)
>>> output = loss(f_s, f_t)
Expand Down
2 changes: 1 addition & 1 deletion dalib/models/dann.py → dalib/adaptation/dann.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import torch
import torch.nn as nn
from ._util import WarmStartGradientReverseLayer, binary_accuracy
from .classifier import Classifier as ClassifierBase
from dalib.vision.classifier import Classifier as ClassifierBase

__all__ = ['DomainDiscriminator', 'DomainAdversarialLoss']

Expand Down
4 changes: 2 additions & 2 deletions dalib/models/mdd.py → dalib/adaptation/mdd.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class MarginDisparityDiscrepancyLoss(nn.Module):
.. note::
MarginDisparityDiscrepancyLoss has already used GradientReverseLayer, thus adversarial_classifier is
merely a classifier head, e.g. `dalib.models.mdd.AdversarialClassifier`
merely a classifier head, e.g. `dalib.adaptation.mdd.AdversarialClassifier`
Shape:
- y_s, y_t: :math:`(N, C)` where C = number of classes.
Expand Down Expand Up @@ -127,7 +127,7 @@ def forward(self, x):


class Classifier(nn.Module):
"""Classifier for MDD. Similar as `nn.dalib.models.classifier.Classifier`"""
"""Classifier for MDD. Similar as `nn.dalib.vision.classifier.Classifier`"""
def __init__(self, backbone, num_classes, use_bottleneck=True, bottleneck_dim=1024, head_bottleneck_dim=1024):
super(Classifier, self).__init__()
self.backbone = backbone
Expand Down
1 change: 0 additions & 1 deletion dalib/models/__init__.py

This file was deleted.

1 change: 0 additions & 1 deletion dalib/models/backbones/__init__.py

This file was deleted.

Empty file added dalib/text/__init__.py
Empty file.
3 changes: 3 additions & 0 deletions dalib/vision/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from .resnet import *

__all__ = ['resnet', 'classifier']
File renamed without changes.
File renamed without changes.
10 changes: 5 additions & 5 deletions docs/Makefile
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = dalib
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build

Expand All @@ -17,4 +17,4 @@ help:
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
55 changes: 55 additions & 0 deletions docs/source/dalib.adaptation.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
dalib.adaptation package
========================

Module contents
---------------

.. automodule:: dalib.adaptation

dalib.adaptation.dan module
---------------------------

.. autoclass:: dalib.adaptation.dan.MultipleKernelMaximumMeanDiscrepancy
:show-inheritance:

.. autoclass:: dalib.adaptation.dan.GaussianKernel
:show-inheritance:

dalib.adaptation.dann module
----------------------------

.. autoclass:: dalib.adaptation.dann.DomainAdversarialLoss
:show-inheritance:

.. autoclass:: dalib.adaptation.dann.DomainDiscriminator
:show-inheritance:

dalib.adaptation.cdan module
----------------------------

.. autoclass:: dalib.adaptation.cdan.ConditionalDomainAdversarialLoss
:show-inheritance:

.. autoclass:: dalib.adaptation.cdan.DomainDiscriminator
:show-inheritance:

dalib.adaptation.mdd module
----------------------------

.. autoclass:: dalib.adaptation.mdd.MarginDisparityDiscrepancyLoss
:show-inheritance:

.. autoclass:: dalib.adaptation.mdd.MarginDisparityDiscrepancy
:show-inheritance:

.. autoclass:: dalib.adaptation.mdd.AdversarialClassifier
:show-inheritance:

.. autoclass:: dalib.adaptation.mdd.Classifier
:show-inheritance:

dalib.adaptation.afn module
----------------------------

.. autoclass:: dalib.adaptation.afn.StepwiseAdaptiveFeatureNorm
:show-inheritance:
22 changes: 0 additions & 22 deletions docs/source/dalib.models.backbones.rst

This file was deleted.

74 changes: 0 additions & 74 deletions docs/source/dalib.models.rst

This file was deleted.

3 changes: 2 additions & 1 deletion docs/source/dalib.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ Subpackages
.. toctree::

dalib.datasets
dalib.models
dalib.adaptation
dalib.vision

Module contents
---------------
Expand Down
10 changes: 10 additions & 0 deletions docs/source/dalib.text.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
dalib.text package
==================

Module contents
---------------

.. automodule:: dalib.text
:members:
:undoc-members:
:show-inheritance:
25 changes: 25 additions & 0 deletions docs/source/dalib.vision.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
dalib.vision package
====================

Module contents
---------------

.. automodule:: dalib.vision
:members:
:show-inheritance:

dalib.vision.classifier module
------------------------------

.. autoclass:: dalib.vision.classifier.Classifier
:show-inheritance:


dalib.vision.resnet module
--------------------------

.. automodule:: dalib.vision.resnet
:members:
:undoc-members:
:show-inheritance:

Loading

0 comments on commit 7526f0a

Please sign in to comment.