learning_rate_scheduler

allennlp.training.learning_rate_schedulers.learning_rate_scheduler

LearningRateScheduler

class LearningRateScheduler(Scheduler, Registrable):
 | def __init__(
 |     self,
 |     optimizer: torch.optim.Optimizer,
 |     last_epoch: int = -1
 | ) -> None

get_values

class LearningRateScheduler(Scheduler, Registrable):
 | ...
 | @overrides
 | def get_values(self)
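
Because the base class is Registrable, a concrete scheduler is normally built from a configuration entry via from_params, with the trainer supplying the optimizer programmatically (which is why, as the notes below repeat, the optimizer never appears in the configuration file). A minimal sketch of that pattern; the Linear model and SGD settings are placeholders, not from this page:

import torch
from allennlp.common import Params
from allennlp.training.learning_rate_schedulers import LearningRateScheduler

# Toy model and optimizer, for illustration only.
model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# "type" selects a registered subclass; the optimizer is passed in directly
# rather than through the config, matching the notes below.
scheduler = LearningRateScheduler.from_params(
    params=Params({"type": "step", "step_size": 10}),
    optimizer=optimizer,
)
print(scheduler.get_values())  # current learning rate, one value per parameter group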

StepLearningRateScheduler

@LearningRateScheduler.register("step")
class StepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     step_size: int,
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "step". The "optimizer" argument does not get an entry in a configuration file for the object.
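
For direct use outside a config file, the wrapper can be constructed with the signature shown above. A minimal sketch with a placeholder model and hyperparameters:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    StepLearningRateScheduler,
)

optimizer = torch.optim.SGD(torch.nn.Linear(4, 2).parameters(), lr=0.1)
# Multiply the learning rate by gamma after every step_size calls to step().
scheduler = StepLearningRateScheduler(optimizer=optimizer, step_size=10, gamma=0.5)

for epoch in range(30):
    # ... one epoch of training ...
    scheduler.step()

print(scheduler.get_values())  # [0.0125]: 0.1 halved at steps 10, 20, and 30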

MultiStepLearningRateScheduler

@LearningRateScheduler.register("multi_step")
class MultiStepLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     milestones: List[int],
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "multi_step". The "optimizer" argument does not get an entry in a configuration file for the object.
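
Where the step scheduler decays at a fixed interval, this one decays at an explicit list of milestones. A sketch under the same placeholder setup as above:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    MultiStepLearningRateScheduler,
)

optimizer = torch.optim.SGD(torch.nn.Linear(4, 2).parameters(), lr=0.1)
# Decay by gamma at steps 8, 12, and 14 rather than at a fixed interval.
# Equivalent config entry (no "optimizer" key, per the note above):
#   {"type": "multi_step", "milestones": [8, 12, 14], "gamma": 0.1}
scheduler = MultiStepLearningRateScheduler(
    optimizer=optimizer, milestones=[8, 12, 14], gamma=0.1
)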

ExponentialLearningRateScheduler

@LearningRateScheduler.register("exponential")
class ExponentialLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     gamma: float = 0.1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "exponential". The "optimizer" argument does not get an entry in a configuration file for the object.
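
Here gamma is applied on every call to step(), so the default of 0.1 shrinks the learning rate by an order of magnitude each time; values close to 1.0 are the usual choice in practice. A placeholder sketch:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    ExponentialLearningRateScheduler,
)

optimizer = torch.optim.SGD(torch.nn.Linear(4, 2).parameters(), lr=0.1)
scheduler = ExponentialLearningRateScheduler(optimizer=optimizer, gamma=0.95)

scheduler.step()
print(scheduler.get_values())  # ~[0.095]: one 5% decay step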

ReduceOnPlateauLearningRateScheduler

@LearningRateScheduler.register("reduce_on_plateau")
class ReduceOnPlateauLearningRateScheduler(_PyTorchLearningRateSchedulerWithMetricsWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     mode: str = "min",
 |     factor: float = 0.1,
 |     patience: int = 10,
 |     verbose: bool = False,
 |     threshold_mode: str = "rel",
 |     threshold: float = 1e-4,
 |     cooldown: int = 0,
 |     min_lr: Union[float, List[float]] = 0,
 |     eps: float = 1e-8
 | ) -> None

Registered as a LearningRateScheduler with name "reduce_on_plateau". The "optimizer" argument does not get an entry in a configuration file for the object.
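
Unlike the schedulers above, this one must be fed the metric it is watching: step() takes the latest validation metric and reduces the learning rate by factor once the metric has failed to improve for patience consecutive calls. A sketch with a stand-in metric:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    ReduceOnPlateauLearningRateScheduler,
)

optimizer = torch.optim.Adam(torch.nn.Linear(4, 2).parameters(), lr=1e-3)
scheduler = ReduceOnPlateauLearningRateScheduler(
    optimizer=optimizer,
    mode="min",    # lower is better, e.g. validation loss; use "max" for accuracy
    factor=0.5,    # halve the learning rate on a plateau
    patience=3,    # tolerate 3 non-improving metrics before reducing
)

for epoch in range(20):
    val_loss = 1.0 / (epoch + 1)  # stand-in for a real validation loss
    scheduler.step(val_loss)      # the metric argument is required here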

ConstantLearningRateScheduler

@LearningRateScheduler.register("constant")
class ConstantLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(self, optimizer: Optimizer, last_epoch: int = -1) -> None

Registered as a LearningRateScheduler with name "constant". The "optimizer" argument does not get an entry in a configuration file for the object.

ConstantWithWarmupLearningRateScheduler

@LearningRateScheduler.register("constant_with_warmup")
class ConstantWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "constant_with_warmup". The "optimizer" argument does not get an entry in a configuration file for the object.
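
A sketch assuming the usual behavior of warmed-up constant schedules: the learning rate ramps linearly from 0 to the optimizer's configured rate over num_warmup_steps calls to step(), then stays flat. All names and values below are placeholders:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    ConstantWithWarmupLearningRateScheduler,
)

optimizer = torch.optim.AdamW(torch.nn.Linear(4, 2).parameters(), lr=5e-5)
# Ramp up over the first 100 calls to step(), then hold at 5e-5.
scheduler = ConstantWithWarmupLearningRateScheduler(
    optimizer=optimizer, num_warmup_steps=100
)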

CosineWithWarmupLearningRateScheduler

@LearningRateScheduler.register("cosine_with_warmup")
class CosineWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     num_training_steps: int,
 |     num_cycles: float = 0.5,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "cosine_with_warmup". The "optimizer" argument does not get an entry in a configuration file for the object.
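
A sketch under the same assumptions: linear warmup to the peak rate, then cosine decay toward zero by num_training_steps. The default num_cycles of 0.5 traces half a cosine wave, i.e. a smooth one-way decay with no restarts:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    CosineWithWarmupLearningRateScheduler,
)

optimizer = torch.optim.AdamW(torch.nn.Linear(4, 2).parameters(), lr=5e-5)
# Warm up for 100 steps, then decay along a half cosine wave until step 1000.
scheduler = CosineWithWarmupLearningRateScheduler(
    optimizer=optimizer, num_warmup_steps=100, num_training_steps=1000
)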

CosineHardRestartsWithWarmupLearningRateScheduler

@LearningRateScheduler.register("cosine_hard_restarts_with_warmup")
class CosineHardRestartsWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     num_training_steps: int,
 |     num_cycles: int = 1,
 |     last_epoch: int = -1
 | ) -> None

Registered as a LearningRateScheduler with name "cosine_hard_restarts_with_warmup". The "optimizer" argument does not get an entry in a configuration file for the object.
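
The hard-restarts variant differs in that num_cycles is an integer count of cosine cycles: the decay window is split into num_cycles pieces, and the learning rate jumps back to its post-warmup peak at the start of each one. A placeholder sketch, assuming that standard hard-restarts behavior:

import torch
from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    CosineHardRestartsWithWarmupLearningRateScheduler,
)

optimizer = torch.optim.AdamW(torch.nn.Linear(4, 2).parameters(), lr=5e-5)
# Three cosine cycles between steps 100 and 1000, with the rate restarting
# at 5e-5 at the start of the second and third cycles.
scheduler = CosineHardRestartsWithWarmupLearningRateScheduler(
    optimizer=optimizer, num_warmup_steps=100, num_training_steps=1000, num_cycles=3
)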