allennlp.modules.seq2seq_encoders.pass_through_encoder


PassThroughEncoder

@Seq2SeqEncoder.register("pass_through")
class PassThroughEncoder(Seq2SeqEncoder):
 | def __init__(self, input_dim: int) -> None

This class lets you skip a Seq2SeqEncoder entirely just by changing a configuration file: the encoder passes its input through unchanged. This is useful for ablations and for measuring the impact of different elements of your model.

Registered as a Seq2SeqEncoder with name "pass_through".
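
A minimal sketch of constructing the encoder directly in Python rather than from a configuration file (the input dimension of 16 is an arbitrary value chosen for illustration):

from allennlp.modules.seq2seq_encoders import PassThroughEncoder

# The encoder's only parameter is the dimensionality of its input,
# which is also the dimensionality of its output.
encoder = PassThroughEncoder(input_dim=16)

In a configuration file, the same encoder would be selected via its registered name "pass_through".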

get_input_dim

class PassThroughEncoder(Seq2SeqEncoder):
 | ...
 | def get_input_dim(self) -> int

get_output_dim

class PassThroughEncoder(Seq2SeqEncoder):
 | ...
 | def get_output_dim(self) -> int
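
Both getters report the same value, since a pass-through encoder cannot change the dimensionality of its input. A quick check, reusing the encoder sketched above:

assert encoder.get_input_dim() == 16
assert encoder.get_output_dim() == 16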

is_bidirectional

class PassThroughEncoder(Seq2SeqEncoder):
 | ...
 | def is_bidirectional(self)
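
Since the encoder performs no encoding at all, it is not bidirectional; assuming the encoder from the sketch above, the check below is expected to hold:

assert encoder.is_bidirectional() is False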

forward

class PassThroughEncoder(Seq2SeqEncoder):
 | ...
 | def forward(
 |     self,
 |     inputs: torch.Tensor,
 |     mask: torch.BoolTensor = None
 | ) -> torch.Tensor

Parameters

  • inputs : torch.Tensor
    A tensor of shape (batch_size, timesteps, input_dim).
  • mask : torch.BoolTensor, optional (default = None)
    A tensor of shape (batch_size, timesteps).

Returns

  • A tensor of shape (batch_size, timesteps, output_dim), where output_dim = input_dim.
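
A short usage sketch of forward, assuming a batch of 2 sequences with 5 timesteps and the input_dim of 16 used above; the shapes in the comments mirror the parameter descriptions:

import torch

inputs = torch.randn(2, 5, 16)               # (batch_size, timesteps, input_dim)
mask = torch.ones(2, 5, dtype=torch.bool)    # (batch_size, timesteps)

output = encoder(inputs, mask)
assert output.shape == (2, 5, 16)            # output_dim == input_dim

With no mask (or an all-True mask, as here), the output should simply be the input tensor, passed through unchanged.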