
remove the redundant lr step

Hamid Shojanazeri
commit 4f70348b94
1 changed file with 0 additions and 2 deletions

utils/train_utils.py  +0 -2

@@ -193,8 +193,6 @@ def train(model, train_dataloader,eval_dataloader, tokenizer, optimizer, lr_sche
         else:
             print(f"Epoch {epoch+1}: train_perplexity={train_perplexity:.4f}, train_epoch_loss={train_epoch_loss:.4f}")
             
-        lr_scheduler.step()
-
     avg_train_prep = sum(train_prep)/len(train_prep)
     avg_train_loss = sum(train_loss)/len(train_loss)
     if train_config.run_validation:
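
For context, the rationale behind the removal (a hedged illustration, not the repository's actual training loop): if the scheduler is already stepped once per epoch elsewhere in train(), a second lr_scheduler.step() at this point advances the schedule again, so the learning rate decays twice as fast as intended. A minimal sketch, assuming a StepLR-style once-per-epoch schedule:

# Minimal sketch (not the llama-recipes code) of why a duplicate per-epoch
# lr_scheduler.step() is redundant: each extra call advances the schedule,
# halving the learning rate an additional time per epoch here.
import torch

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1, gamma=0.5)

for epoch in range(3):
    # ... inner loop over batches would run here ...
    scheduler.step()     # intended once-per-epoch decay
    # scheduler.step()   # a second call would decay the LR again in the same epoch
    print(f"epoch {epoch}: lr={optimizer.param_groups[0]['lr']:.6f}")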