Commit
Add new configs
kazemnejad committed Oct 30, 2021
1 parent 8ca71c0 commit 3692026
Showing 6 changed files with 65 additions and 0 deletions.
8 changes: 8 additions & 0 deletions configs/models/kazemink_lr.conf
@@ -0,0 +1,8 @@
model {
  lr_scheduler {
    type = kazemink_with_warmup
    warmup_steps = 10
    saturation_step = 5000
    kazemink_coeff = 3
  }
}
8 changes: 8 additions & 0 deletions configs/models/kazemink_lr_10.conf
@@ -0,0 +1,8 @@
model {
  lr_scheduler {
    type = kazemink_with_warmup
    warmup_steps = 10
    saturation_step = 5000
    kazemink_coeff = 10
  }
}
8 changes: 8 additions & 0 deletions configs/models/kazemink_lr_30.conf
@@ -0,0 +1,8 @@
model {
  lr_scheduler {
    type = kazemink_with_warmup
    warmup_steps = 10
    saturation_step = 5000
    kazemink_coeff = 30
  }
}
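
For reference, these three configs are identical except for kazemink_coeff (3, 10, and 30). A minimal sketch of reading one of them, assuming the .conf files are HOCON and parsed with pyhocon (the repository's actual config loader is not shown in this diff):

from pyhocon import ConfigFactory

# Parse one of the new config files and inspect the scheduler block
conf = ConfigFactory.parse_file("configs/models/kazemink_lr.conf")
scheduler_conf = conf["model"]["lr_scheduler"]
print(scheduler_conf["type"])             # kazemink_with_warmup
print(scheduler_conf["warmup_steps"])     # 10
print(scheduler_conf["saturation_step"])  # 5000
print(scheduler_conf["kazemink_coeff"])   # 3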
6 changes: 6 additions & 0 deletions configs/trainer/no_wd.conf
@@ -0,0 +1,6 @@
model {

  optimizer {
    weight_decay = 0
  }
}
Empty file modified scripts/launch_experiment.py
100644 → 100755
Empty file.
35 changes: 35 additions & 0 deletions src/modules/lr_scheduler.py
@@ -69,4 +69,39 @@ def lr_fn(global_step):

        super(ConstantWithWarmupScheduler, self).__init__(
            optimizer, lr_fn, last_epoch
        )


@LearningRateScheduler.register("kazemink_with_warmup")
class KazeminkLRScheduler(LearningRateScheduler):
    def __init__(
        self,
        optimizer: torch.optim.Optimizer,
        warmup_steps: int,
        saturation_step: int,
        kazemink_coeff: int,
        last_epoch: int = -1,
    ):
        """Initialize configuration of the learning rate schedule.

        Args:
            warmup_steps: An integer, the number of steps required for linear warmup.
            saturation_step: An integer, the step after which the learning rate
                coefficient switches from 1 to ``kazemink_coeff``.
            kazemink_coeff: The multiplier applied to the base learning rate once
                ``saturation_step`` has been passed.
        """
        self.saturation_step = saturation_step
        self.warmup_steps = warmup_steps
        self.kazemink_coeff = kazemink_coeff

        def lr_fn(global_step):
            lr_coeff = 1

            # Apply linear warmup
            if global_step < self.warmup_steps:
                lr_coeff = global_step / self.warmup_steps

            # After the saturation step, scale the learning rate by the coefficient
            if global_step > self.saturation_step:
                lr_coeff = self.kazemink_coeff

            return lr_coeff

        super(KazeminkLRScheduler, self).__init__(
            optimizer, lr_fn, last_epoch
        )
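
As a quick sanity check, the following standalone function mirrors lr_fn above with the values from configs/models/kazemink_lr.conf (warmup_steps=10, saturation_step=5000, kazemink_coeff=3). It is an illustrative sketch, not part of the commit:

def kazemink_lr_coeff(global_step, warmup_steps=10, saturation_step=5000, kazemink_coeff=3):
    # Linear warmup from 0 to 1 over the first warmup_steps steps
    if global_step < warmup_steps:
        return global_step / warmup_steps
    # After saturation_step, the base learning rate is multiplied by kazemink_coeff
    if global_step > saturation_step:
        return kazemink_coeff
    # Constant plateau between warmup and saturation
    return 1

print(kazemink_lr_coeff(5))     # 0.5 -- mid-warmup
print(kazemink_lr_coeff(100))   # 1   -- plateau
print(kazemink_lr_coeff(6000))  # 3   -- after saturation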
