# learning_rate_scheduler

`allennlp.training.learning_rate_schedulers.learning_rate_scheduler`
## LearningRateScheduler
class LearningRateScheduler(Scheduler, Registrable):
| def __init__(
| self,
| optimizer: torch.optim.Optimizer,
| last_epoch: int = -1
| ) -> None
### get_values
class LearningRateScheduler(Scheduler, Registrable):
| ...
| @overrides
| def get_values(self)
## StepLearningRateScheduler
@LearningRateScheduler.register("step")
class StepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
| def __init__(
| self,
| optimizer: Optimizer,
| step_size: int,
| gamma: float = 0.1,
| last_epoch: int = -1
| ) -> None
Registered as a LearningRateScheduler
with name "step". The "optimizer" argument does not get
an entry in a configuration file for the object.
## MultiStepLearningRateScheduler
@LearningRateScheduler.register("multi_step")
class MultiStepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
| def __init__(
| self,
| optimizer: Optimizer,
| milestones: List[int],
| gamma: float = 0.1,
| last_epoch: int = -1
| ) -> None
Registered as a LearningRateScheduler
with name "multi_step". The "optimizer" argument does
not get an entry in a configuration file for the object.
## ExponentialLearningRateScheduler
@LearningRateScheduler.register("exponential")
class ExponentialLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
| def __init__(
| self,
| optimizer: Optimizer,
| gamma: float = 0.1,
| last_epoch: int = -1
| ) -> None
Registered as a LearningRateScheduler
with name "exponential". The "optimizer" argument does
not get an entry in a configuration file for the object.
## ReduceOnPlateauLearningRateScheduler
@LearningRateScheduler.register("reduce_on_plateau")
class ReduceOnPlateauLearningRateScheduler(_PyTorchLearningRateSchedulerWithMetricsWrapper):
| def __init__(
| self,
| optimizer: Optimizer,
| mode: str = "min",
| factor: float = 0.1,
| patience: int = 10,
| verbose: bool = False,
| threshold_mode: str = "rel",
| threshold: float = 1e-4,
| cooldown: int = 0,
| min_lr: Union[float, List[float]] = 0,
| eps: float = 1e-8
| ) -> None
Registered as a LearningRateScheduler
with name "reduce_on_plateau". The "optimizer" argument
does not get an entry in a configuration file for the object.