
allennlp.training.callbacks.wandb



WandBCallback

@TrainerCallback.register("wandb")
class WandBCallback(LogWriterCallback):
 | def __init__(
 |     self,
 |     serialization_dir: str,
 |     summary_interval: int = 100,
 |     distribution_interval: Optional[int] = None,
 |     batch_size_interval: Optional[int] = None,
 |     should_log_parameter_statistics: bool = True,
 |     should_log_learning_rate: bool = False,
 |     project: Optional[str] = None,
 |     tags: Optional[List[str]] = None,
 |     watch_model: bool = True,
 |     files_to_save: Tuple[str, ...] = ("config.json", "out.log")
 | ) -> None

Logs training runs to Weights & Biases.

Note

This requires the environment variable 'WANDB_API_KEY' to be set.
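For example, the key can be set in the environment before the trainer starts. This is a minimal sketch, not part of the documented API; the placeholder value must be replaced with a real key, and in practice the variable is usually exported in the shell instead.

 | import os
 |
 | # Assumption: setting the variable programmatically before training starts
 | # works the same as exporting it in the shell.
 | os.environ["WANDB_API_KEY"] = "<your-api-key>"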

In addition to the parameters that LogWriterCallback takes, there are several other parameters specific to WandBCallback, listed below.

Parameters

  • project : Optional[str], optional (default = None)
    The name of the W&B project to save the training run to.
  • tags : Optional[List[str]], optional (default = None)
    Tags to assign to the training run in W&B.
  • watch_model : bool, optional (default = True)
    Whether or not W&B should watch the Model.
  • files_to_save : Tuple[str, ...], optional (default = ("config.json", "out.log"))
    Extra files in the serialization directory to save to the W&B training run.
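Because the class is registered under the name "wandb" (see the decorator above), it can be enabled from the callbacks section of a trainer configuration, or constructed directly in Python. The following is a minimal sketch, not taken from this documentation: the project name, tags, and serialization directory are placeholders.

 | # Option 1: reference the callback in the trainer section of a config.
 | trainer_config = {
 |     "callbacks": [
 |         {
 |             "type": "wandb",                 # registered name of this callback
 |             "project": "my-project",         # placeholder W&B project
 |             "tags": ["baseline"],            # placeholder tags
 |             "watch_model": True,
 |             "files_to_save": ["config.json", "out.log"],
 |         }
 |     ],
 |     # ... other trainer settings ...
 | }
 |
 | # Option 2: construct it directly (module path as documented above).
 | from allennlp.training.callbacks.wandb import WandBCallback
 |
 | callback = WandBCallback(
 |     serialization_dir="/tmp/example-run",    # placeholder serialization directory
 |     project="my-project",
 |     tags=["baseline"],
 | )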

log_scalars

class WandBCallback(LogWriterCallback):
 | ...
 | @overrides
 | def log_scalars(
 |     self,
 |     scalars: Dict[str, Union[int, float]],
 |     log_prefix: str = "",
 |     epoch: Optional[int] = None
 | ) -> None

log_tensors

class WandBCallback(LogWriterCallback):
 | ...
 | @overrides
 | def log_tensors(
 |     self,
 |     tensors: Dict[str, torch.Tensor],
 |     log_prefix: str = "",
 |     epoch: Optional[int] = None
 | ) -> None

on_start

class WandBCallback(LogWriterCallback):
 | ...
 | @overrides
 | def on_start(
 |     self,
 |     trainer: "GradientDescentTrainer",
 |     is_primary: bool = True,
 |     **kwargs
 | ) -> None