allennlp.training.callbacks.wandb
WandBCallback
@TrainerCallback.register("wandb")
class WandBCallback(LogWriterCallback):
| def __init__(
| self,
| serialization_dir: str,
| summary_interval: int = 100,
| distribution_interval: Optional[int] = None,
| batch_size_interval: Optional[int] = None,
| should_log_parameter_statistics: bool = True,
| should_log_learning_rate: bool = False,
| project: Optional[str] = None,
| entity: Optional[str] = None,
| group: Optional[str] = None,
| name: Optional[str] = None,
| notes: Optional[str] = None,
| tags: Optional[List[str]] = None,
| watch_model: bool = True,
| files_to_save: Tuple[str, ...] = ("config.json", "out.log"),
| wandb_kwargs: Optional[Dict[str, Any]] = None
| ) -> None
Logs training runs to Weights & Biases.
Note
This requires the environment variable 'WANDB_API_KEY' to be set in order to authenticate with Weights & Biases. If not set, you may be prompted to log in or upload the experiment to an anonymous account.
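For example, a minimal way to set the key from Python before training starts (the value shown is a placeholder, not a real key):

```python
import os

# Placeholder only; generate your own key at https://wandb.ai/authorize.
os.environ["WANDB_API_KEY"] = "your-api-key"
```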
In addition to the parameters that LogWriterCallback takes, there are several other parameters specific to WandBCallback, listed below.
Parameters

- project : Optional[str], optional (default = None)
    The name of the W&B project to save the training run to.
- entity : Optional[str], optional (default = None)
    The username or team name to send the run to. If not specified, the default will be used.
- group : Optional[str], optional (default = None)
    Specify a group to organize individual runs into a larger experiment.
- name : Optional[str], optional (default = None)
    A short display name for this run, which is how you'll identify it in the W&B UI. By default a random name is generated.
- notes : Optional[str], optional (default = None)
    A description of the run.
- tags : Optional[List[str]], optional (default = None)
    Tags to assign to the training run in W&B.
- watch_model : bool, optional (default = True)
    Whether or not W&B should watch the Model.
- files_to_save : Tuple[str, ...], optional (default = ("config.json", "out.log"))
    Extra files in the serialization directory to save to the W&B training run.
- wandb_kwargs : Optional[Dict[str, Any]], optional (default = None)
    Additional keyword arguments to pass to wandb.init().
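Because the class is registered as "wandb" (see the @TrainerCallback.register("wandb") decorator above), it can also be selected by that type name in a training config. As a minimal sketch of direct construction in Python, where the serialization directory, project, entity, run name, and tags are all illustrative placeholders:

```python
from allennlp.training.callbacks.wandb import WandBCallback

# All values below are placeholders for illustration only.
callback = WandBCallback(
    serialization_dir="/tmp/my-run",  # typically the same directory the trainer serializes to
    project="my-project",
    entity="my-team",
    name="baseline-run",
    tags=["baseline"],
)
```

The callback would then be passed to the trainer via its callbacks argument rather than used on its own.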
log_scalars
class WandBCallback(LogWriterCallback):
| ...
| def log_scalars(
| self,
| scalars: Dict[str, Union[int, float]],
| log_prefix: str = "",
| epoch: Optional[int] = None
| ) -> None
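log_scalars writes a dictionary of scalar metrics to the W&B run. A hedged usage sketch, assuming callback is a WandBCallback that has already been started by its trainer; the metric names and values are invented:

```python
# Metric names and values are illustrative only.
callback.log_scalars(
    {"loss": 0.42, "accuracy": 0.87},
    log_prefix="train",
    epoch=3,
)
```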
log_tensors
class WandBCallback(LogWriterCallback):
| ...
| def log_tensors(
| self,
| tensors: Dict[str, torch.Tensor],
| log_prefix: str = "",
| epoch: Optional[int] = None
| ) -> None
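log_tensors is the tensor counterpart: it takes full torch.Tensor values, for example to track parameter distributions. A sketch under the same assumptions as above, with an invented tensor and name:

```python
import torch

# An invented tensor standing in for, e.g., a model parameter.
callback.log_tensors(
    {"encoder/weight": torch.randn(64, 128)},
    log_prefix="params",
    epoch=3,
)
```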
on_start
class WandBCallback(LogWriterCallback):
| ...
| def on_start(
| self,
| trainer: "GradientDescentTrainer",
| is_primary: bool = True,
| **kwargs
| ) -> None
close
class WandBCallback(LogWriterCallback):
| ...
| def close(self) -> None
state_dict
class WandBCallback(LogWriterCallback):
| ...
| def state_dict(self) -> Dict[str, Any]
load_state_dict
class WandBCallback(LogWriterCallback):
| ...
| def load_state_dict(self, state_dict: Dict[str, Any]) -> None
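These two hooks let the callback save and restore its own state as part of trainer checkpointing. A minimal round-trip sketch, assuming callback and restored_callback are WandBCallback instances (both names invented):

```python
# Capture the callback's state, e.g., alongside a training checkpoint ...
state = callback.state_dict()

# ... and later hand it to a fresh instance when the trainer is restored.
restored_callback.load_state_dict(state)
```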