diff --git a/Classification/cnns/optimizer_util.py b/Classification/cnns/optimizer_util.py
index 156994c..7cba7f6 100755
--- a/Classification/cnns/optimizer_util.py
+++ b/Classification/cnns/optimizer_util.py
@@ -66,8 +66,8 @@ def set_up_optimizer(loss, args):
             warmup=warmup
         )
     elif args.lr_decay == 'polynomial':
-        # PolynomialSchduler
-        lr_scheduler = flow.optimizer.PolynomialSchduler(
+        # PolynomialScheduler
+        lr_scheduler = flow.optimizer.PolynomialScheduler(
             base_lr=args.learning_rate,
             steps=decay_batches,
             end_learning_rate=0.00001,
diff --git a/LanguageModeling/BERT/util.py b/LanguageModeling/BERT/util.py
index 6b04d55..7b50719 100755
--- a/LanguageModeling/BERT/util.py
+++ b/LanguageModeling/BERT/util.py
@@ -142,7 +142,7 @@ def callback(outputs):
 def CreateOptimizer(args):
     warmup_batches = int(args.iter_num * args.warmup_proportion)
     lr_warmup = flow.optimizer.warmup.linear(warmup_batches, 0)
-    lr_scheduler = flow.optimizer.PolynomialSchduler(args.learning_rate, args.iter_num, 0.0,
+    lr_scheduler = flow.optimizer.PolynomialScheduler(args.learning_rate, args.iter_num, 0.0,
                                                      warmup=lr_warmup)
     loss_scale_policy = None
     if args.use_fp16: