# learning_rate_scheduler

`allennlp.training.learning_rate_schedulers.learning_rate_scheduler`
## LearningRateScheduler

```python
class LearningRateScheduler(Scheduler, Registrable):
 | def __init__(
 |     self,
 |     optimizer: torch.optim.Optimizer,
 |     last_epoch: int = -1
 | ) -> None
```
### get_values

```python
class LearningRateScheduler(Scheduler, Registrable):
 | ...
 | def get_values(self)
```
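On concrete scheduler subclasses, `get_values` reports the current learning rate for each of the optimizer's parameter groups. Below is a minimal sketch of assumed usage; the direct construction, the toy model, and the printed value are illustrative assumptions, not taken from the AllenNLP docs.

```python
# Minimal sketch (assumed usage): query the current per-parameter-group
# learning rates from a concrete scheduler subclass.
import torch

from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    ConstantLearningRateScheduler,
)

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scheduler = ConstantLearningRateScheduler(optimizer)

print(scheduler.get_values())  # one value per parameter group, e.g. [0.1]
```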
## ConstantLearningRateScheduler

```python
@LearningRateScheduler.register("constant")
class ConstantLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(self, optimizer: Optimizer, last_epoch: int = -1) -> None
```

Registered as a `LearningRateScheduler` with name "constant". The "optimizer"
argument does not get an entry in a configuration file for the object.
### Example

Config for using the `ConstantLearningRateScheduler` Learning Rate Scheduler.

```json
{
    ...
    "trainer":{
        ...
        "learning_rate_scheduler": "constant",
        ...
    }
}
```

Note that you do NOT pass an `optimizer` key to the Learning Rate Scheduler.
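To illustrate why the config carries no `optimizer` entry, here is a hedged sketch of programmatic construction through `from_params`, the mechanism the trainer uses to supply the optimizer as an "extra"; the toy model and optimizer below are illustrative assumptions.

```python
# Minimal sketch (assumed usage): the optimizer is passed in code as an
# extra to from_params, which is why the config file never contains it.
import torch

from allennlp.common import Params
from allennlp.training.learning_rate_schedulers import LearningRateScheduler

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)

scheduler = LearningRateScheduler.from_params(
    params=Params({"type": "constant"}),
    optimizer=optimizer,  # supplied here, never in the config file
)
```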
## ConstantWithWarmupLearningRateScheduler

```python
@LearningRateScheduler.register("constant_with_warmup")
class ConstantWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     last_epoch: int = -1
 | ) -> None
```

Registered as a `LearningRateScheduler` with name "constant_with_warmup". The
"optimizer" argument does not get an entry in a configuration file for the
object.
### Parameters

- optimizer : `torch.optim.Optimizer`
  This argument does not get an entry in a configuration file for the object.
- num_warmup_steps : `int`
  The number of steps to linearly increase the learning rate.
### Example

Config for using the `ConstantWithWarmupLearningRateScheduler` Learning Rate
Scheduler with `num_warmup_steps` set to `100`.
```json
{
    ...
    "trainer":{
        ...
        "learning_rate_scheduler": {
            "type": "constant_with_warmup",
            "num_warmup_steps": 100
        },
        ...
    }
}
```
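As a hedged sketch of what this schedule does (assumed usage; the manual `step()` calls below stand in for the trainer's per-batch stepping): the learning rate ramps linearly from 0 up to the optimizer's base value over `num_warmup_steps`, then stays constant.

```python
# Minimal sketch (assumed usage): linear warmup to the base lr, then constant.
import torch

from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    ConstantWithWarmupLearningRateScheduler,
)

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
scheduler = ConstantWithWarmupLearningRateScheduler(optimizer, num_warmup_steps=100)

for step in range(150):
    optimizer.step()
    scheduler.step()  # advance the wrapped schedule by one step
    if step in (49, 99, 149):
        print(step + 1, scheduler.get_values())
# Expected shape of the output: the lr grows until step 100, then holds at 1e-3.
```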
## CosineWithWarmupLearningRateScheduler
```python
@LearningRateScheduler.register("cosine_with_warmup")
class CosineWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     num_training_steps: int,
 |     num_cycles: float = 0.5,
 |     last_epoch: int = -1
 | ) -> None
```

Registered as a `LearningRateScheduler` with name "cosine_with_warmup". The
"optimizer" argument does not get an entry in a configuration file for the
object.
### Parameters

- optimizer : `torch.optim.Optimizer`
  This argument does not get an entry in a configuration file for the object.
- num_warmup_steps : `int`
  The number of steps to linearly increase the learning rate.
### Example

Config for using the `CosineWithWarmupLearningRateScheduler` Learning Rate
Scheduler with `num_warmup_steps` set to `100`.
```json
{
    ...
    "trainer":{
        ...
        "learning_rate_scheduler": {
            "type": "cosine_with_warmup",
            "num_warmup_steps": 100
        },
        ...
    }
}
```
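Note that, per the signature above, the constructor also requires `num_training_steps`, which bounds the cosine decay. A hedged sketch of assumed usage (the manual stepping and the toy values are illustrative): after the linear warmup, the learning rate follows a cosine curve from the base value down toward zero over the remaining training steps.

```python
# Minimal sketch (assumed usage): linear warmup, then cosine decay toward 0.
import torch

from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    CosineWithWarmupLearningRateScheduler,
)

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
scheduler = CosineWithWarmupLearningRateScheduler(
    optimizer,
    num_warmup_steps=100,
    num_training_steps=1000,
)

for _ in range(550):  # warmup plus half of the decay phase
    optimizer.step()
    scheduler.step()
print(scheduler.get_values())  # roughly half the base lr at this point
```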
## CosineHardRestartsWithWarmupLearningRateScheduler
```python
@LearningRateScheduler.register("cosine_hard_restarts_with_warmup")
class CosineHardRestartsWithWarmupLearningRateScheduler(_PyTorchLearningRateSchedulerWrapper):
 | def __init__(
 |     self,
 |     optimizer: Optimizer,
 |     num_warmup_steps: int,
 |     num_training_steps: int,
 |     num_cycles: int = 1,
 |     last_epoch: int = -1
 | ) -> None
```

Registered as a `LearningRateScheduler` with name
"cosine_hard_restarts_with_warmup". The "optimizer" argument does not get an
entry in a configuration file for the object.
### Example

Config for using the `CosineHardRestartsWithWarmupLearningRateScheduler`
Learning Rate Scheduler with `num_warmup_steps` set to `100`.

```json
{
    ...
    "trainer":{
        ...
        "learning_rate_scheduler": {
            "type": "cosine_hard_restarts_with_warmup",
            "num_warmup_steps": 100
        },
        ...
    }
}
```
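A hedged sketch of assumed usage: `num_cycles` sets how many cosine cycles fit into the `num_training_steps` budget, with each cycle decaying from the base learning rate toward zero before restarting "hard" at the base value. The construction below (toy model, optimizer, and step counts) is illustrative, not from the AllenNLP docs.

```python
# Minimal sketch (assumed usage): warmup, then num_cycles cosine cycles,
# each ending in a hard restart back at the base learning rate.
import torch

from allennlp.training.learning_rate_schedulers.learning_rate_scheduler import (
    CosineHardRestartsWithWarmupLearningRateScheduler,
)

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
scheduler = CosineHardRestartsWithWarmupLearningRateScheduler(
    optimizer,
    num_warmup_steps=100,
    num_training_steps=1000,
    num_cycles=2,  # two decay-and-restart cycles after warmup
)

for _ in range(1000):
    optimizer.step()
    scheduler.step()
```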