gated_sum

allennlp.modules.gated_sum

GatedSum

class GatedSum(torch.nn.Module):
 | def __init__(
 |     self,
 |     input_dim: int,
 |     activation: Activation = torch.nn.Sigmoid()
 | ) -> None

This Module represents a gated sum of two tensors a and b. Specifically:

f = activation(W [a; b])
out = f * a + (1 - f) * b

Parameters

  • input_dim : int
    The dimensionality of the input. We assume the inputs have shape (..., input_dim).
  • activation : Activation, optional (default = torch.nn.Sigmoid())
    The activation function to use.
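
For example, gating two batches of 10-dimensional vectors (a minimal usage sketch; the batch size and shapes here are illustrative):

import torch
from allennlp.modules.gated_sum import GatedSum

gated_sum = GatedSum(input_dim=10)

a = torch.randn(4, 10)  # (batch_size, input_dim)
b = torch.randn(4, 10)  # must have the same shape as a

out = gated_sum(a, b)   # same shape as the inputs: (4, 10)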

get_input_dim

class GatedSum(torch.nn.Module):
 | ...
 | def get_input_dim(self)
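
Returns the input_dim passed to the constructor.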

get_output_dim

class GatedSum(torch.nn.Module):
 | ...
 | def get_output_dim(self)
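
Also returns input_dim: the gated sum has the same dimensionality as each of its inputs.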

forward

class GatedSum(torch.nn.Module):
 | ...
 | def forward(
 |     self,
 |     input_a: torch.Tensor,
 |     input_b: torch.Tensor
 | ) -> torch.Tensor
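
forward computes the gated sum described above: the two inputs are concatenated along the last dimension, projected to a single gate value f, and used to interpolate between input_a and input_b. A plain-PyTorch sketch of this logic (a standalone re-implementation for illustration, not the module's actual code):

import torch

def gated_sum(input_a: torch.Tensor,
              input_b: torch.Tensor,
              gate: torch.nn.Linear,  # assumed shape: Linear(2 * input_dim, 1)
              activation: torch.nn.Module = torch.nn.Sigmoid()) -> torch.Tensor:
    if input_a.size() != input_b.size():
        raise ValueError("The input tensors must have the same size.")
    # f = activation(W [a; b]): one gate value per position,
    # broadcast over the last dimension.
    f = activation(gate(torch.cat([input_a, input_b], dim=-1)))
    # out = f * a + (1 - f) * b
    return f * input_a + (1 - f) * input_b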