transformer_pooler
allennlp.modules.transformer.transformer_pooler
TransformerPooler
class TransformerPooler(ActivationLayer, FromParams):
    def __init__(
        self,
        hidden_size: int,
        intermediate_size: int,
        activation: Union[str, torch.nn.Module] = "relu",
    )