Commit b3f197bd authored by Anthony Larcher's avatar Anthony Larcher
Browse files

add cyclicLR1

parent bca82ed4
...@@ -1207,6 +1207,19 @@ def get_optimizer(model, model_opts, train_opts, training_loader): ...@@ -1207,6 +1207,19 @@ def get_optimizer(model, model_opts, train_opts, training_loader):
step_size_down=None, step_size_down=None,
cycle_momentum=cycle_momentum, cycle_momentum=cycle_momentum,
mode="triangular2") mode="triangular2")
elif train_opts["scheduler"]["type"] == 'CyclicLR1':
cycle_momentum = True
if train_opts["optimizer"]["type"] == "adam":
cycle_momentum = False
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer=optimizer,
base_lr=1e-8,
max_lr=train_opts["lr"],
step_size_up=model_opts["speaker_number"] * 2,
step_size_down=None,
cycle_momentum=cycle_momentum,
mode="triangular")
elif train_opts["scheduler"]["type"] == "MultiStepLR": elif train_opts["scheduler"]["type"] == "MultiStepLR":
scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizer, scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer=optimizer,
milestones=[10000,50000,100000], milestones=[10000,50000,100000],
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment