[ allennlp.common.cached_transformers ]
model_name = None
override_weights_file = None
override_weights_strip_prefix = None
def get( model_name: str, make_copy: bool, override_weights_file: Optional[str] = None, override_weights_strip_prefix: Optional[str] = None ) -> transformers.PreTrainedModel
Returns a transformer model from the cache.
- model_name :
The name of the transformer, for example `"bert-base-cased"`.
- make_copy :
If this is `True`, return a copy of the model instead of the cached model itself. If you want to modify the parameters of the model, set this to `True`. If you want only part of the model, set this to `False`, but make sure to `copy.deepcopy()` the bits you are keeping.
- override_weights_file :
If set, this specifies a file from which to load alternate weights that override the weights from huggingface. The file is expected to contain a PyTorch `state_dict`, created with `torch.save()`.
- override_weights_strip_prefix :
If set, strip the given prefix from the state dict when loading it.
def get_tokenizer( model_name: str, **kwargs ) -> transformers.PreTrainedTokenizer