learning_rate_scheduler

allennlp.training.learning_rate_schedulers.learning_rate_scheduler


LearningRateScheduler Objects

class LearningRateScheduler(Scheduler, Registrable):
 | def __init__(
 |     self,
 |     optimizer: torch.optim.Optimizer,
 |     last_epoch: int = -1
 | ) -> None

get_values

 | @overrides
 | def get_values(self)

StepLearningRateScheduler Objects

class StepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     step_size: int,
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "step". The "optimizer" argument does not get an entry in a configuration file for the object.
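
Since the optimizer is constructed elsewhere and handed to the scheduler programmatically, a configuration only specifies the remaining arguments. A minimal Python sketch, using a toy linear model and illustrative hyperparameter values:

```python
import torch

from allennlp.common.params import Params
from allennlp.training.learning_rate_schedulers import LearningRateScheduler

model = torch.nn.Linear(10, 2)  # toy model for illustration
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# The optimizer is passed as an extra argument rather than inside the
# Params, mirroring how the trainer supplies it.
scheduler = LearningRateScheduler.from_params(
    params=Params({"type": "step", "step_size": 30, "gamma": 0.5}),
    optimizer=optimizer,
)

for epoch in range(90):
    # ... one epoch of training ...
    scheduler.step()  # LR is multiplied by gamma every `step_size` epochs
```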

MultiStepLearningRateScheduler Objects

class MultiStepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     milestones: List[int],
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "multi_step". The "optimizer" argument does not get an entry in a configuration file for the object.
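
The same pattern applies; a brief sketch reusing the `optimizer` from the example above, with purely illustrative milestones:

```python
# The LR is multiplied by gamma once the epoch count reaches each
# milestone (here, epochs 30 and 80).
scheduler = LearningRateScheduler.from_params(
    params=Params({"type": "multi_step", "milestones": [30, 80], "gamma": 0.1}),
    optimizer=optimizer,
)
```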

ExponentialLearningRateScheduler Objects

class ExponentialLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "exponential". The "optimizer" argument does not get an entry in a configuration file for the object.
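
A sketch along the same lines; the decay is applied at every step, so after t epochs the learning rate is lr_0 * gamma ** t (the gamma below is illustrative, not the default):

```python
scheduler = LearningRateScheduler.from_params(
    params=Params({"type": "exponential", "gamma": 0.9}),
    optimizer=optimizer,
)
scheduler.step()  # after each epoch: lr <- lr * gamma
```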

ReduceOnPlateauLearningRateScheduler Objects

class ReduceOnPlateauLearningRateScheduler(_PyTorchLearningRateSchedulerWithMetricsWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     mode: str = "min",
 |     factor: float = 0.1,
 |     patience: int = 10,
 |     verbose: bool = False,
 |     threshold_mode: str = "rel",
 |     threshold: float = 1e-4,
 |     cooldown: int = 0,
 |     min_lr: Union[float, List[float]] = 0,
 |     eps: float = 1e-8
 | ) -> None

Registered as a LearningRateScheduler with name "reduce_on_plateau". The "optimizer" argument does not get an entry in a configuration file for the object.
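
Unlike the schedulers above, this one must be stepped with a validation metric; a sketch with a synthetic, decreasing loss standing in for a real validation loop:

```python
scheduler = LearningRateScheduler.from_params(
    params=Params(
        {"type": "reduce_on_plateau", "mode": "min", "factor": 0.5, "patience": 2}
    ),
    optimizer=optimizer,
)

for epoch in range(20):
    validation_loss = 1.0 / (epoch + 1)  # synthetic stand-in for a real metric
    # A metric is required here; stepping without one raises an error.
    scheduler.step(validation_loss)
```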