activation_layer
allennlp.modules.transformer.activation_layer
ActivationLayer
class ActivationLayer(TransformerModule, FromParams):
| def __init__(
|     self,
|     hidden_size: int,
|     intermediate_size: int,
|     activation: Union[str, torch.nn.Module],
|     pool: bool = False
| )
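A minimal construction-and-call sketch, assuming the layer maps the last dimension from hidden_size to intermediate_size (as the parameter names suggest). A torch.nn.Module is passed for activation because the signature accepts Union[str, torch.nn.Module]; the accepted activation strings are not listed on this page, so none are assumed.

import torch
from allennlp.modules.transformer.activation_layer import ActivationLayer

# Project 768-dimensional hidden states into a 3072-dimensional intermediate
# space and apply GELU.
layer = ActivationLayer(
    hidden_size=768,
    intermediate_size=3072,
    activation=torch.nn.GELU(),
)

hidden_states = torch.randn(2, 10, 768)  # (batch_size, seq_len, hidden_size)
output = layer(hidden_states)            # assumed shape: (2, 10, 3072)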
get_output_dim
class ActivationLayer(TransformerModule, FromParams):
| ...
| def get_output_dim(self) -> int
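get_output_dim reports the size of the feature dimension the layer produces, which lets downstream modules be sized without hard-coding intermediate_size. A short sketch continuing the example above; that the returned value equals intermediate_size is an assumption based on the parameter names.

# Size a downstream projection from the layer's reported output dimension
# (assumed to equal intermediate_size, i.e. 3072 in the example above).
out_dim = layer.get_output_dim()
classifier = torch.nn.Linear(out_dim, 2)
logits = classifier(output)              # assumed shape: (2, 10, 2)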
forward
class ActivationLayer(TransformerModule, FromParams):
| ...
| def forward(self, hidden_states)
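forward applies the layer to a tensor of hidden states. The sketch below illustrates the pool flag under the assumption that pool=True makes the layer behave like a BERT-style pooler, reducing over the sequence dimension (for example by taking the first token's hidden state) before the projection and activation; the signature alone does not specify this behaviour.

# Hypothetical pooling usage: with pool=True the output is assumed to drop the
# sequence dimension, as in a BERT-style pooler over the first token.
pooler = ActivationLayer(
    hidden_size=768,
    intermediate_size=768,
    activation=torch.nn.Tanh(),
    pool=True,
)
pooled = pooler(hidden_states)           # assumed shape: (2, 768)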