
Commit 8a9eca5

A few optimizer comments, dead import, missing import
1 parent 959eaff · commit 8a9eca5

File tree

3 files changed: +8, -2 lines

timm/optim/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -3,6 +3,7 @@
 from .adafactor import Adafactor
 from .adahessian import Adahessian
 from .lookahead import Lookahead
+from .madgrad import MADGRAD
 from .nadam import Nadam
 from .nvnovograd import NvNovoGrad
 from .radam import RAdam
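With the missing import added, MADGRAD is now exposed at the package level alongside the other optimizers. A minimal usage sketch (the constructor arguments follow the upstream facebookresearch/madgrad implementation and the values are illustrative, not tuned recommendations):

import torch
from timm.optim import MADGRAD

model = torch.nn.Linear(10, 2)
optimizer = MADGRAD(model.parameters(), lr=1e-2, momentum=0.9, weight_decay=0.0)

# One illustrative training step on random data
loss = model(torch.randn(8, 10)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()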

timm/optim/madgrad.py

Lines changed: 6 additions & 0 deletions
@@ -1,3 +1,9 @@
+""" PyTorch MADGRAD optimizer
+
+MADGRAD: https://arxiv.org/abs/2101.11075
+
+Code from: https://github.com/facebookresearch/madgrad
+"""
 # Copyright (c) Facebook, Inc. and its affiliates.
 #
 # This source code is licensed under the MIT license found in the

timm/optim/optim_factory.py

Lines changed: 1 addition & 2 deletions
@@ -1,12 +1,11 @@
 """ Optimizer Factory w/ Custom Weight Decay
-Hacked together by / Copyright 2020 Ross Wightman
+Hacked together by / Copyright 2021 Ross Wightman
 """
 from typing import Optional

 import torch
 import torch.nn as nn
 import torch.optim as optim
-from torch.optim.optimizer import required

 from .adabelief import AdaBelief
 from .adafactor import Adafactor
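The removed import was the dead one named in the commit message: required is PyTorch's sentinel default for mandatory optimizer hyperparameters, and optim_factory.py defines no Optimizer subclass that would use it. A sketch of the pattern the sentinel exists for (the ToySGD class below is hypothetical, for illustration only):

import torch
from torch.optim.optimizer import required


class ToySGD(torch.optim.Optimizer):
    def __init__(self, params, lr=required, weight_decay=0.):
        # The base Optimizer raises a ValueError if a hyperparameter whose
        # default is the `required` sentinel is not supplied for a param group.
        super().__init__(params, dict(lr=lr, weight_decay=weight_decay))


model = torch.nn.Linear(4, 2)
opt = ToySGD(model.parameters(), lr=0.1)   # fine
# ToySGD(model.parameters())               # raises: required parameter lr not specified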
