@@ -398,7 +398,7 @@ class NaturalExpDecay(LRScheduler):
 
     Args:
         learning_rate (float): The initial learning rate. It is a python float number.
-        gamma (float, optional): A Ratio to update the learning rate. Default: 0.1.
+        gamma (float, optional): A ratio used to update the learning rate; it should be greater than 0.0 so that the learning rate decays. Default: 0.1.
         last_epoch (int, optional): The index of last epoch. Can be set to restart training. Default: -1, means initial learning rate.
         verbose (bool, optional): If ``True``, prints a message to stdout for each update. Default: ``False`` .
 
@@ -456,6 +456,7 @@ class NaturalExpDecay(LRScheduler):
     """
 
     def __init__(self, learning_rate, gamma, last_epoch=-1, verbose=False):
+        assert gamma > 0.0, " 'gamma' must be a positive number so that the learning rate will decay."
         self.gamma = gamma
         super(NaturalExpDecay, self).__init__(learning_rate, last_epoch,
                                               verbose)
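
For context on this new assert: NaturalExpDecay follows the documented update rule new_lr = learning_rate * e^(-gamma * epoch), so the rate only decays when gamma is positive. Below is a minimal standalone sketch of that rule to show what a non-positive gamma would do; the helper name is illustrative, and this is not Paddle's implementation:

import math

def natural_exp_decay(learning_rate, gamma, epoch):
    # new_lr = learning_rate * e^(-gamma * epoch); decays only when gamma > 0
    return learning_rate * math.exp(-gamma * epoch)

print(natural_exp_decay(0.5, 0.1, 10))   # ~0.1839: decays as intended
print(natural_exp_decay(0.5, -0.1, 10))  # ~1.3591: a negative gamma grows the rate
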
@@ -573,7 +574,7 @@ class PolynomialDecay(LRScheduler):
         learning_rate (float): The initial learning rate. It is a python float number.
         decay_steps(int): The decay step size. It determines the decay cycle. It must be a positive integer.
         end_lr(float, optional): The minimum final learning rate. Default: 0.0001.
-        power(float, optional): Power of polynomial. Default: 1.0.
+        power(float, optional): Power of polynomial; it should be greater than 0.0 so that the learning rate decays. Default: 1.0.
         cycle(bool, optional): Whether the learning rate rises again. If True, then the learning rate will rise when it decreases
             to ``end_lr`` . If False, the learning rate is monotone decreasing. Default: False.
         last_epoch (int, optional): The index of last epoch. Can be set to restart training. Default: -1, means initial learning rate.
@@ -644,6 +645,7 @@ def __init__(self,
             decay_steps, int), " 'decay_steps' must be a positive integer."
         self.decay_steps = decay_steps
         self.end_lr = end_lr
+        assert power > 0.0, " 'power' must be greater than 0.0 so that the learning rate will decay."
         self.power = power
         self.cycle = cycle
         super(PolynomialDecay, self).__init__(learning_rate, last_epoch,
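
The power check follows the same logic. In the non-cycling case, PolynomialDecay is documented to compute new_lr = (learning_rate - end_lr) * (1 - epoch/decay_steps)^power + end_lr, and a non-positive power defeats the decay entirely (power == 0 freezes the rate at its initial value). A standalone sketch of the formula, assuming cycle=False; again an illustration, not Paddle's code:

def polynomial_decay(learning_rate, decay_steps, end_lr, power, epoch):
    # cap epoch at decay_steps so the rate bottoms out at end_lr (cycle=False case)
    epoch = min(epoch, decay_steps)
    return (learning_rate - end_lr) * (1 - epoch / decay_steps) ** power + end_lr

print(polynomial_decay(0.5, 20, 0.0001, 1.0, 10))  # ~0.2500: linear decay at the midpoint
print(polynomial_decay(0.5, 20, 0.0001, 0.0, 10))  # 0.5: power == 0 never decays
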
@@ -820,7 +822,7 @@ class ExponentialDecay(LRScheduler):
     Args:
         learning_rate (float): The initial learning rate. It is a python float number.
         gamma (float): The ratio by which the learning rate will be reduced. ``new_lr = origin_lr * gamma`` .
-            It should be less than 1.0.
+            It should be in the interval (0.0, 1.0).
         last_epoch (int, optional): The index of last epoch. Can be set to restart training. Default: -1, means initial learning rate.
         verbose (bool, optional): If ``True``, prints a message to stdout for each update. Default: ``False`` .
 
@@ -878,6 +880,7 @@ class ExponentialDecay(LRScheduler):
     """
 
     def __init__(self, learning_rate, gamma, last_epoch=-1, verbose=False):
+        assert gamma > 0.0 and gamma < 1.0, " 'gamma' must be in the interval (0.0, 1.0) so that the learning rate will decay."
         self.gamma = gamma
         super(ExponentialDecay, self).__init__(learning_rate, last_epoch,
                                                verbose)
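
This check is stricter than the other two because of the update rule quoted in the docstring, new_lr = learning_rate * gamma^epoch: a gamma of 1.0 or more stops the decay, and a gamma of 0.0 or less makes the rate vanish or oscillate in sign. A sketch of the rule under that assumption, with an illustrative helper name:

def exponential_decay(learning_rate, gamma, epoch):
    # new_lr = learning_rate * gamma**epoch; decays only when 0 < gamma < 1
    return learning_rate * gamma ** epoch

print(exponential_decay(0.5, 0.9, 10))  # ~0.1743: decays
print(exponential_decay(0.5, 1.0, 10))  # 0.5: gamma == 1.0 never decays
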