From 10ff6bbbc270279493c73ae41e0ea6af5af0a60b Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Tue, 8 Aug 2023 18:14:33 +0300 Subject: [PATCH] feat: move import of OneCycleLR to the top of the file to keep it out of the for loop Signed-off-by: Onuralp SEZER --- train_flert_model.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/train_flert_model.py b/train_flert_model.py index 601f5dd..4447cd4 100644 --- a/train_flert_model.py +++ b/train_flert_model.py @@ -6,6 +6,7 @@ from flair.embeddings import TransformerWordEmbeddings from flair.models import SequenceTagger from flair.trainers import ModelTrainer +from torch.optim.lr_scheduler import OneCycleLR if __name__ == "__main__": @@ -74,8 +75,7 @@ output_folder = f"flert-{args.dataset}-{hf_model}-{seed}" # train with XLM parameters (AdamW, 20 epochs, small LR) - from torch.optim.lr_scheduler import OneCycleLR - + trainer.train( output_folder, learning_rate=5.0e-5,