Skip to content
Snippets Groups Projects
Commit ea870679 authored by Mateusz Klimaszewski's avatar Mateusz Klimaszewski
Browse files

Add TODO note for next AllenNLP update.

parent 2a413006
No related branches found
No related tags found
2 merge requests: !4 Documentation, !3 Herbert configuration and AllenNLP 1.2.0 update.
This commit is part of merge request !3. Comments created here will be created in the context of that merge request.
...@@ -6,10 +6,11 @@ from allennlp.data import token_indexers, tokenizers ...@@ -6,10 +6,11 @@ from allennlp.data import token_indexers, tokenizers
@data.TokenIndexer.register("pretrained_transformer_mismatched_fixed") @data.TokenIndexer.register("pretrained_transformer_mismatched_fixed")
class PretrainedTransformerMismatchedIndexer(token_indexers.PretrainedTransformerMismatchedIndexer): class PretrainedTransformerMismatchedIndexer(token_indexers.PretrainedTransformerMismatchedIndexer):
"""TODO(mklimasz) Remove during next allennlp update, fixed on allennlp master."""
def __init__(self, model_name: str, namespace: str = "tags", max_length: int = None, def __init__(self, model_name: str, namespace: str = "tags", max_length: int = None,
tokenizer_kwargs: Optional[Dict[str, Any]] = None, **kwargs) -> None: tokenizer_kwargs: Optional[Dict[str, Any]] = None, **kwargs) -> None:
# The matched version v.s. mismatched # The matched version v.s. mismatched
super().__init__(model_name, namespace, max_length, tokenizer_kwargs, **kwargs) super().__init__(model_name, namespace, max_length, tokenizer_kwargs, **kwargs)
self._matched_indexer = PretrainedTransformerIndexer( self._matched_indexer = PretrainedTransformerIndexer(
model_name, model_name,
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.