From 8fbff648a7588cb57e72b96d6332aef0e39c0463 Mon Sep 17 00:00:00 2001
From: Mateusz Klimaszewski <mk.klimaszewski@gmail.com>
Date: Mon, 4 Jan 2021 14:26:33 +0100
Subject: [PATCH] Set default projection dim in transformer embedder.

---
 combo/models/embeddings.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/combo/models/embeddings.py b/combo/models/embeddings.py
index 49f1ab9..d8e9d7a 100644
--- a/combo/models/embeddings.py
+++ b/combo/models/embeddings.py
@@ -107,7 +107,7 @@ class TransformersWordEmbedder(token_embedders.PretrainedTransformerMismatchedEm
 
     def __init__(self,
                  model_name: str,
-                 projection_dim: int,
+                 projection_dim: int = 0,
                  projection_activation: Optional[allen_nn.Activation] = lambda x: x,
                  projection_dropout_rate: Optional[float] = 0.0,
                  freeze_transformer: bool = True,
-- 
GitLab
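
A minimal sketch (not part of the patch) of what the change means for callers. It assumes, as the new default suggests, that projection_dim == 0 means "no extra projection layer, keep the transformer's native hidden size"; the model name below is purely illustrative.

    from combo.models.embeddings import TransformersWordEmbedder

    # Before the patch: projection_dim had no default, so every caller
    # had to pass it explicitly.
    embedder = TransformersWordEmbedder(
        model_name="bert-base-cased",  # illustrative model name
        projection_dim=100,            # project contextual embeddings to 100 dims
    )

    # After the patch: projection_dim defaults to 0, so the argument can be
    # omitted (assumed to mean "skip the projection on the transformer output").
    embedder = TransformersWordEmbedder(model_name="bert-base-cased")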