Skip to content

linear

allennlp_models.lm.modules.language_model_heads.linear

[SOURCE]


LinearLanguageModelHead

@LanguageModelHead.register("linear")
class LinearLanguageModelHead(LanguageModelHead):
 | def __init__(
 |     self,
 |     vocab: Vocabulary,
 |     input_dim: int,
 |     vocab_namespace: str
 | ) -> None

Uses torch.nn.Linear as a language model head. Does nothing else fancy. This was intended largely for testing code with small models and simple components. For actually training a language model you would likely want something nicer, such as tying weights with an input embedding, or an adaptive softmax.

get_input_dim

class LinearLanguageModelHead(LanguageModelHead):
 | ...
 | def get_input_dim(self) -> int

get_output_dim

class LinearLanguageModelHead(LanguageModelHead):
 | ...
 | def get_output_dim(self) -> int

forward

class LinearLanguageModelHead(LanguageModelHead):
 | ...
 | def forward(self, hidden_states: torch.Tensor) -> torch.Tensor