@@ -47,8 +47,6 @@ class ReduceLR(_ParamScheduler):
         eps (float, optional): Minimal decay applied to lr. If the difference
             between new and old lr is smaller than eps, the update is
             ignored. Default: 1e-8.
-        verbose (bool): If ``True``, prints a message to stdout for
-            each update. Default: ``False``.
         begin (int): Step at which to start updating the learning rate.
             Defaults to 0.
         end (int): Step at which to stop updating the learning rate.
@@ -68,7 +66,6 @@ def __init__(self,
                  cooldown: int = 0,
                  min_lr: float = 0.,
                  eps: float = 1e-8,
-                 verbose: bool = False,
                  **kwargs):

         super().__init__(optimizer=optimizer, param_name='lr', **kwargs)
@@ -99,7 +96,6 @@ def __init__(self,
         self.mode_worse = None  # the worse value for the chosen mode
         self.min_lr = min_lr
         self.eps = eps
-        self.verbose = verbose
         self.last_epoch = 0
         self._init_is_better(self.mode)
         self._reset()
@@ -130,11 +126,7 @@ def _get_value(self):
         for group in self.optimizer.param_groups:
             regular_lr = group[self.param_name]
             if regular_lr - regular_lr * self.factor > self.eps:
-                new_lr = max(regular_lr * self.factor, self.min_lr)
-                if self.verbose:
-                    print(f'Reducing learning rate of {group} from '
-                          f'{regular_lr:.4e} to {new_lr:.4e}.')
-                regular_lr = new_lr
+                regular_lr = max(regular_lr * self.factor, self.min_lr)
             results.append(regular_lr)
         return results
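
For reference, a minimal standalone sketch of the reduction step as it reads after this change, keeping the same `factor`, `min_lr`, and `eps` semantics as the scheduler but using a hypothetical helper name `reduce_lrs`:

def reduce_lrs(lrs, factor=0.1, min_lr=0.0, eps=1e-8):
    # Multiply each lr by `factor` and floor it at `min_lr`; skip the update
    # when the resulting decay would be smaller than `eps`.
    results = []
    for regular_lr in lrs:
        if regular_lr - regular_lr * factor > eps:
            regular_lr = max(regular_lr * factor, min_lr)
        results.append(regular_lr)
    return results

# The first lr is reduced by the factor; the second is left unchanged because
# its decay would be below eps.
print(reduce_lrs([0.1, 1e-9]))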