
Commit

Add files via upload
HazeDT authored Mar 13, 2021
1 parent 833c85e commit 28e6847
Showing 27 changed files with 486 additions and 0 deletions.
2 changes: 2 additions & 0 deletions DAGCN/utils/__init__.py
@@ -0,0 +1,2 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-
Binary file added DAGCN/utils/__pycache__/__init__.cpython-35.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/__init__.cpython-36.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/__init__.cpython-37.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/__init__.cpython-38.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/freeze.cpython-38.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/logger.cpython-35.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/logger.cpython-36.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/logger.cpython-37.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/logger.cpython-38.pyc
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/save.cpython-35.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/save.cpython-36.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/save.cpython-37.pyc
Binary file not shown.
Binary file added DAGCN/utils/__pycache__/save.cpython-38.pyc
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
12 changes: 12 additions & 0 deletions DAGCN/utils/freeze.py
@@ -0,0 +1,12 @@

from collections.abc import Iterable  # collections.Iterable was removed in Python 3.10

def set_freeze_by_id(model, layer_num_last):
    """Freeze all parameters, then unfreeze the last `layer_num_last` child modules."""
    for param in model.parameters():
        param.requires_grad = False
    child_list = list(model.children())[-layer_num_last:]
    if not isinstance(child_list, Iterable):
        child_list = list(child_list)
    for child in child_list:
        for param in child.parameters():
            param.requires_grad = True
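
A minimal usage sketch (the model and layer count below are illustrative, and the import path is a hypothetical assumption about how the package is laid out): freeze everything except the last two child modules.

import torch.nn as nn
from utils.freeze import set_freeze_by_id  # hypothetical import path

model = nn.Sequential(nn.Linear(16, 32), nn.ReLU(), nn.Linear(32, 4))
set_freeze_by_id(model, layer_num_last=2)
# Only the last two children stay trainable; ReLU has no parameters,
# so in practice only the final Linear layer remains unfrozen.
print([name for name, p in model.named_parameters() if p.requires_grad])
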
17 changes: 17 additions & 0 deletions DAGCN/utils/logger.py
@@ -0,0 +1,17 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-

import logging

def setlogger(path):
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    logFormatter = logging.Formatter("%(asctime)s %(message)s", "%m-%d %H:%M:%S")

    fileHandler = logging.FileHandler(path)
    fileHandler.setFormatter(logFormatter)
    logger.addHandler(fileHandler)

    consoleHandler = logging.StreamHandler()
    consoleHandler.setFormatter(logFormatter)
    logger.addHandler(consoleHandler)
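
A minimal usage sketch (the log path is illustrative and the import path is an assumption): once setlogger has run, INFO-level messages go to both the console and the given file.

import logging
from utils.logger import setlogger  # hypothetical import path

setlogger('./train.log')                  # illustrative path
logging.info('epoch 1, train loss 0.42')  # written to stdout and ./train.log
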
83 changes: 83 additions & 0 deletions DAGCN/utils/lr_scheduler.py
@@ -0,0 +1,83 @@
import types
import math
from collections import Counter
from functools import partial

from torch.optim.optimizer import Optimizer


class _LRScheduler(object):
    def __init__(self, optimizer, last_epoch=-1):
        if not isinstance(optimizer, Optimizer):
            raise TypeError('{} is not an Optimizer'.format(
                type(optimizer).__name__))
        self.optimizer = optimizer
        if last_epoch == -1:
            for group in optimizer.param_groups:
                group.setdefault('initial_lr', group['lr'])
            last_epoch = 0
        else:
            for i, group in enumerate(optimizer.param_groups):
                if 'initial_lr' not in group:
                    raise KeyError("param 'initial_lr' is not specified "
                                   "in param_groups[{}] when resuming an optimizer".format(i))
        self.base_lrs = list(map(lambda group: group['initial_lr'], optimizer.param_groups))
        self.step(last_epoch)

    def state_dict(self):
        """Returns the state of the scheduler as a :class:`dict`.

        It contains an entry for every variable in self.__dict__ which
        is not the optimizer.
        """
        return {key: value for key, value in self.__dict__.items() if key != 'optimizer'}

    def load_state_dict(self, state_dict):
        """Loads the scheduler's state.

        Arguments:
            state_dict (dict): scheduler state. Should be an object returned
                from a call to :meth:`state_dict`.
        """
        self.__dict__.update(state_dict)

    def get_lr(self):
        raise NotImplementedError

    def step(self, epoch=None):
        if epoch is None:
            epoch = self.last_epoch + 1
        self.last_epoch = epoch
        for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
            param_group['lr'] = lr


class transferLearning(_LRScheduler):
    """Anneals the learning rate of each parameter group as

        lr = param_lr / (1 + alpha * epoch / max_epoch) ** beta,

    the schedule commonly used in transfer-learning / domain-adaptation training.
    When last_epoch=-1, sets initial lr as lr.

    Args:
        optimizer (Optimizer): Wrapped optimizer.
        param_lr (list of float): Base learning rate of each parameter group.
        max_epoch (int): Total number of training epochs.
        alpha (float): Annealing strength. Default: 10.
        beta (float): Annealing exponent. Default: 0.75.
        last_epoch (int): The index of last epoch. Default: -1.
    """

    def __init__(self, optimizer, param_lr, max_epoch, alpha=10, beta=0.75, last_epoch=-1):
        self.param_lr = param_lr
        self.max_epoch = max_epoch
        self.alpha = alpha
        self.beta = beta
        super(transferLearning, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        if self.last_epoch == 0:
            return self.base_lrs
        return [lr * 1 / (1 + self.alpha * self.last_epoch / self.max_epoch) ** self.beta
                for lr in self.param_lr]
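
A minimal usage sketch (the optimizer, base learning rate, and epoch count are illustrative, and the import path is an assumption): the wrapped optimizer's learning rate is annealed from its initial value as training proceeds, with one scheduler step per epoch.

import torch
from utils.lr_scheduler import transferLearning  # hypothetical import path

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
param_lr = [group['lr'] for group in optimizer.param_groups]
scheduler = transferLearning(optimizer, param_lr=param_lr, max_epoch=100)

for epoch in range(100):
    # ... one epoch of training ...
    scheduler.step()
# after 100 steps: lr = 1e-2 / (1 + 10 * 100 / 100) ** 0.75 ≈ 1.7e-3
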


19 changes: 19 additions & 0 deletions DAGCN/utils/save.py
@@ -0,0 +1,19 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-

import os

class Save_Tool(object):
    """Keeps at most `max_num` checkpoint files, deleting the oldest on overflow."""
    def __init__(self, max_num=10):
        self.save_list = []
        self.max_num = max_num

    def update(self, save_path):
        if len(self.save_list) < self.max_num:
            self.save_list.append(save_path)
        else:
            remove_path = self.save_list[0]
            del self.save_list[0]
            self.save_list.append(save_path)
            if os.path.exists(remove_path):
                os.remove(remove_path)
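
A minimal usage sketch (checkpoint paths and contents are illustrative, and the import path is an assumption): register each checkpoint with update so that only the most recent max_num files survive on disk.

import torch
from utils.save import Save_Tool  # hypothetical import path

saver = Save_Tool(max_num=3)
for epoch in range(10):
    path = './checkpoint_epoch_{}.pth'.format(epoch)  # illustrative naming
    torch.save({'epoch': epoch}, path)
    saver.update(path)  # once 3 files exist, the oldest is deleted
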
