Skip to content

Commit

Permalink
Merge pull request nomic-ai#3 from nomic-ai/train
Browse files Browse the repository at this point in the history
Log wandb metrics with a correct global step across multiple epochs
  • Loading branch information
AndriyMulyar authored Mar 29, 2023
2 parents aa4dd0e + 9c380d6 commit 252676f
Showing 1 changed file with 4 additions and 2 deletions.
6 changes: 4 additions & 2 deletions train.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,8 @@ def train(accelerator, config):
# log LR in case something weird happens
if step > 0 and step % (config["eval_every"] // 10) == 0:
if config["wandb"]:
accelerator.log({"lr": scheduler.get_last_lr()[0]}, step=step)
curr_step = step + epoch * len(train_dataloader)
accelerator.log({"lr": scheduler.get_last_lr()[0]}, step=curr_step)

if (step + 1) % gradient_accumulation_steps == 0 or step == len(train_dataloader) - 1:
optimizer.step()
Expand All @@ -151,7 +152,8 @@ def train(accelerator, config):
}

if config["wandb"]:
accelerator.log({**log_train, **log_val}, step=step)
curr_step = step + epoch * len(train_dataloader)
accelerator.log({**log_train, **log_val}, step=curr_step)

accelerator.print(f"Current LR: {scheduler.get_last_lr()[0]}")
accelerator.print(format_metrics(log_train, "train", f" step {step} "))
Expand Down

0 comments on commit 252676f

Please sign in to comment.