
Commit 89147a9

Remove adabound optimizer; never got it working well on larger datasets
1 parent 0a84dd5

File tree

3 files changed (+2 additions, −128 deletions)

optim/__init__.py

Lines changed: 1 addition & 2 deletions
@@ -1,4 +1,3 @@
-from optim.adabound import AdaBound
 from optim.nadam import Nadam
 from optim.rmsprop_tf import RMSpropTF
-from optim.optim_factory import create_optimizer
+from optim.optim_factory import create_optimizer

optim/adabound.py

Lines changed: 0 additions & 121 deletions
This file was deleted.

optim/optim_factory.py

Lines changed: 1 addition & 5 deletions
@@ -1,5 +1,5 @@
 from torch import optim as optim
-from optim import Nadam, AdaBound, RMSpropTF
+from optim import Nadam, RMSpropTF


 def add_weight_decay(model, weight_decay=1e-5, skip_list=()):
@@ -35,10 +35,6 @@ def create_optimizer(args, model, filter_bias_and_bn=True):
     elif args.opt.lower() == 'nadam':
         optimizer = Nadam(
             parameters, lr=args.lr, weight_decay=weight_decay, eps=args.opt_eps)
-    elif args.opt.lower() == 'adabound':
-        optimizer = AdaBound(
-            parameters, lr=args.lr / 100, weight_decay=weight_decay, eps=args.opt_eps,
-            final_lr=args.lr)
     elif args.opt.lower() == 'adadelta':
         optimizer = optim.Adadelta(
             parameters, lr=args.lr, weight_decay=weight_decay, eps=args.opt_eps)
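
For context, a minimal sketch of driving the factory after this change, assuming args exposes the opt, lr, weight_decay, and opt_eps fields that create_optimizer reads above; the SimpleNamespace args object and the toy linear model are hypothetical stand-ins, not part of the commit:

from types import SimpleNamespace

import torch.nn as nn

from optim import create_optimizer

# Toy stand-in model, for illustration only.
model = nn.Linear(10, 2)

# Hypothetical args object; after this commit 'adabound' is no longer a
# valid value for opt, while selectors such as 'nadam' and 'adadelta' remain.
args = SimpleNamespace(opt='nadam', lr=1e-3, weight_decay=1e-5, opt_eps=1e-8)

optimizer = create_optimizer(args, model)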
