From 00a307ed8aee5afd77478681e96ce568e7b86104 Mon Sep 17 00:00:00 2001 From: Akhilesh Gotmare Date: Tue, 21 Apr 2020 13:49:17 +0800 Subject: [PATCH] Removing import WarmupLinearSchedule Not supported by latest transformers library. See issue - https://github.com/huggingface/transformers/issues/2082 If warmup is required, we can use `scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=WARMUP_STEPS, num_training_steps = -1)` instead of `scheduler = WarmupLinearSchedule(optimizer, warmup_steps=WARMUP_STEPS, t_total = -1)` --- cbert_finetune.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cbert_finetune.py b/cbert_finetune.py index 7e1b281..695fc37 100755 --- a/cbert_finetune.py +++ b/cbert_finetune.py @@ -14,7 +14,7 @@ import torch from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler -from transformers import BertTokenizer, BertModel, BertForMaskedLM, AdamW, WarmupLinearSchedule +from transformers import BertTokenizer, BertModel, BertForMaskedLM, AdamW #import train_text_classifier_new import cbert_utils @@ -161,4 +161,4 @@ def main(): torch.save(model, save_model_path) if __name__ == "__main__": - main() \ No newline at end of file + main()