bimodal_connection_layer

allennlp.modules.transformer.bimodal_connection_layer
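
Modules for connecting the two streams of a bimodal (e.g. vision-and-language) transformer, in which each modality attends to the other.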

BiModalOutput

class BiModalOutput(TransformerModule, FromParams):
 | def __init__(
 |     self,
 |     hidden_size1: int,
 |     hidden_size2: int,
 |     combined_hidden_size: int,
 |     dropout1: float,
 |     dropout2: float
 | )

forward

class BiModalOutput(TransformerModule, FromParams):
 | ...
 | def forward(
 |     self,
 |     hidden_states1,
 |     input_tensor1,
 |     hidden_states2,
 |     input_tensor2
 | )
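
A minimal usage sketch. The shapes below are assumptions, not part of the documented signature: `hidden_states1`/`hidden_states2` are taken to be co-attention outputs of width `combined_hidden_size`, and `input_tensor1`/`input_tensor2` the residual inputs of width `hidden_size1`/`hidden_size2`, with each stream projected back to its own hidden size before the residual connection and layer norm.

```python
import torch

from allennlp.modules.transformer.bimodal_connection_layer import BiModalOutput

bimodal_output = BiModalOutput(
    hidden_size1=768,
    hidden_size2=1024,
    combined_hidden_size=1024,
    dropout1=0.1,
    dropout2=0.1,
)

batch, len1, len2 = 2, 5, 7
hidden_states1 = torch.randn(batch, len1, 1024)  # assumed: co-attention output for stream 1
input_tensor1 = torch.randn(batch, len1, 768)    # residual input for stream 1
hidden_states2 = torch.randn(batch, len2, 1024)  # assumed: co-attention output for stream 2
input_tensor2 = torch.randn(batch, len2, 1024)   # residual input for stream 2

out1, out2 = bimodal_output(hidden_states1, input_tensor1, hidden_states2, input_tensor2)
# Expected under these assumptions: out1 is (2, 5, 768), out2 is (2, 7, 1024).
```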

BiModalConnectionLayer

class BiModalConnectionLayer(TransformerModule, FromParams):
 | def __init__(
 |     self,
 |     hidden_size1: int,
 |     hidden_size2: int,
 |     combined_hidden_size: int,
 |     intermediate_size1: int,
 |     intermediate_size2: int,
 |     num_attention_heads: int,
 |     dropout1: float,
 |     dropout2: float,
 |     activation: str
 | )

forward

class BiModalConnectionLayer(TransformerModule, FromParams):
 | ...
 | def forward(
 |     self,
 |     input_tensor1,
 |     attention_mask1,
 |     input_tensor2,
 |     attention_mask2,
 |     co_attention_mask=None
 | )
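
A minimal end-to-end sketch for the full connection layer. The mask convention is an assumption: additive masks broadcastable to the attention scores (zeros for positions to attend to, large negative values for positions to ignore), as in the other `allennlp` transformer modules. That the layer returns one tensor per stream is likewise an assumption.

```python
import torch

from allennlp.modules.transformer.bimodal_connection_layer import BiModalConnectionLayer

layer = BiModalConnectionLayer(
    hidden_size1=768,           # e.g. a text stream
    hidden_size2=1024,          # e.g. a vision stream
    combined_hidden_size=1024,  # width of the shared co-attention space
    intermediate_size1=3072,
    intermediate_size2=4096,
    num_attention_heads=8,      # assumed: must divide combined_hidden_size
    dropout1=0.1,
    dropout2=0.1,
    activation="gelu",
)

batch, len1, len2 = 2, 5, 7
input_tensor1 = torch.randn(batch, len1, 768)
input_tensor2 = torch.randn(batch, len2, 1024)

# Additive masks: all zeros keep every position visible.
attention_mask1 = torch.zeros(batch, 1, 1, len1)
attention_mask2 = torch.zeros(batch, 1, 1, len2)

out1, out2 = layer(input_tensor1, attention_mask1, input_tensor2, attention_mask2)
# Expected under these assumptions: out1 is (2, 5, 768), out2 is (2, 7, 1024).
```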