forked from HazeDT/DAGCN
Showing 27 changed files with 486 additions and 0 deletions.
@@ -0,0 +1,2 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-
21 binary files not shown.
@@ -0,0 +1,12 @@

from collections.abc import Iterable  # collections.Iterable was removed in Python 3.10


def set_freeze_by_id(model, layer_num_last):
    # Freeze every parameter in the model...
    for param in model.parameters():
        param.requires_grad = False
    # ...then re-enable gradients for the last `layer_num_last` top-level children.
    child_list = list(model.children())[-layer_num_last:]
    if not isinstance(child_list, Iterable):
        child_list = list(child_list)
    for child in child_list:
        for param in child.parameters():
            param.requires_grad = True
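A minimal usage sketch; the two-stage model and import path below are hypothetical stand-ins, not taken from this commit:

import torch.nn as nn
# from utils.freeze import set_freeze_by_id  # assumed import path

# Hypothetical model: a feature extractor followed by a classifier head.
model = nn.Sequential(
    nn.Sequential(nn.Conv1d(1, 16, kernel_size=3), nn.ReLU()),
    nn.Sequential(nn.Flatten(), nn.LazyLinear(4)),
)

set_freeze_by_id(model, layer_num_last=1)  # unfreeze only the classifier head
print([name for name, p in model.named_parameters() if p.requires_grad])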
@@ -0,0 +1,17 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-

import logging


def setlogger(path):
    # Configure the root logger to emit INFO-level messages
    # both to a file at `path` and to the console.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    logFormatter = logging.Formatter("%(asctime)s %(message)s", "%m-%d %H:%M:%S")

    fileHandler = logging.FileHandler(path)
    fileHandler.setFormatter(logFormatter)
    logger.addHandler(fileHandler)

    consoleHandler = logging.StreamHandler()
    consoleHandler.setFormatter(logFormatter)
    logger.addHandler(consoleHandler)
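A usage sketch; the log path and import path below are illustrative assumptions:

import logging
# from utils.logger import setlogger  # assumed import path

setlogger('./train.log')                    # hypothetical log location
logging.info('epoch 0, train-Loss 1.2340')  # written to both train.log and the console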
@@ -0,0 +1,83 @@
from torch.optim.optimizer import Optimizer


class _LRScheduler(object):
    def __init__(self, optimizer, last_epoch=-1):
        if not isinstance(optimizer, Optimizer):
            raise TypeError('{} is not an Optimizer'.format(
                type(optimizer).__name__))
        self.optimizer = optimizer
        if last_epoch == -1:
            for group in optimizer.param_groups:
                group.setdefault('initial_lr', group['lr'])
            last_epoch = 0
        else:
            for i, group in enumerate(optimizer.param_groups):
                if 'initial_lr' not in group:
                    raise KeyError("param 'initial_lr' is not specified "
                                   "in param_groups[{}] when resuming an optimizer".format(i))
        self.base_lrs = list(map(lambda group: group['initial_lr'], optimizer.param_groups))
        self.step(last_epoch)

    def state_dict(self):
        """Returns the state of the scheduler as a :class:`dict`.

        It contains an entry for every variable in self.__dict__ which
        is not the optimizer.
        """
        return {key: value for key, value in self.__dict__.items() if key != 'optimizer'}

    def load_state_dict(self, state_dict):
        """Loads the scheduler's state.

        Arguments:
            state_dict (dict): scheduler state. Should be an object returned
                from a call to :meth:`state_dict`.
        """
        self.__dict__.update(state_dict)

    def get_lr(self):
        raise NotImplementedError

    def step(self, epoch=None):
        if epoch is None:
            epoch = self.last_epoch + 1
        self.last_epoch = epoch
        for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
            param_group['lr'] = lr


class transferLearning(_LRScheduler):
    """Anneals the learning rate of each parameter group with the inverse
    decay schedule commonly used in domain-adaptation training:

        lr = initial_lr / (1 + alpha * epoch / max_epoch) ** beta

    When last_epoch=-1, sets initial lr as lr.

    Args:
        optimizer (Optimizer): Wrapped optimizer.
        param_lr (list of float): Initial learning rate of each parameter group.
        max_epoch (int): Total number of training epochs.
        alpha (float): Decay speed. Default: 10.
        beta (float): Decay exponent. Default: 0.75.
        last_epoch (int): The index of the last epoch. Default: -1.
    """

    def __init__(self, optimizer, param_lr, max_epoch, alpha=10, beta=0.75, last_epoch=-1):
        self.param_lr = param_lr
        self.max_epoch = max_epoch
        self.alpha = alpha
        self.beta = beta
        super(transferLearning, self).__init__(optimizer, last_epoch)

    def get_lr(self):
        if self.last_epoch == 0:
            return self.base_lrs
        return [lr / (1 + self.alpha * self.last_epoch / self.max_epoch) ** self.beta
                for lr in self.param_lr]
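A minimal usage sketch; the module path, model, and hyperparameters below are illustrative assumptions, not taken from this commit:

import torch
import torch.nn as nn
# from utils.lr_scheduler import transferLearning  # assumed module path

model = nn.Linear(10, 2)  # placeholder model
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
param_lr = [group['lr'] for group in optimizer.param_groups]
scheduler = transferLearning(optimizer, param_lr, max_epoch=100)

for epoch in range(100):
    # ... forward/backward/optimizer.step() would go here ...
    scheduler.step()
    # lr after this call: 0.01 / (1 + 10 * (epoch + 1) / 100) ** 0.75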
@@ -0,0 +1,19 @@
#!/usr/bin/python
# -*- coding:utf-8 -*-

import os


class Save_Tool(object):
    """Keeps at most `max_num` checkpoint files on disk, deleting the oldest."""

    def __init__(self, max_num=10):
        self.save_list = []
        self.max_num = max_num

    def update(self, save_path):
        if len(self.save_list) < self.max_num:
            self.save_list.append(save_path)
        else:
            # Drop the oldest checkpoint to make room for the new one.
            remove_path = self.save_list[0]
            del self.save_list[0]
            self.save_list.append(save_path)
            if os.path.exists(remove_path):
                os.remove(remove_path)
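A sketch of how the tool might sit in a checkpoint loop; the paths, import path, and saved payload are assumptions, not taken from this commit:

import os
import torch
# from utils.save import Save_Tool  # assumed import path

os.makedirs('./checkpoints', exist_ok=True)
saver = Save_Tool(max_num=3)
for epoch in range(10):
    path = os.path.join('./checkpoints', 'epoch_{}.pth'.format(epoch))
    torch.save({'epoch': epoch}, path)  # placeholder payload
    saver.update(path)                  # only the 3 newest files remain on disk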