Syntactic Tools / combo

Commit ad097cf0, authored 1 year ago by Maja Jablonska

Batch sizes as CLI parameters

Parent: 06c4376b
1 merge request: !46 Merge COMBO 3.0 into master
Showing 1 changed file: combo/main.py, with 9 additions and 13 deletions.
@@ -48,14 +48,14 @@ flags.DEFINE_string(name="training_data_path", default="", help="Training data p
 flags.DEFINE_alias(name="training_data", original_name="training_data_path")
 flags.DEFINE_string(name="validation_data_path", default="", help="Validation data path(s)")
 flags.DEFINE_alias(name="validation_data", original_name="validation_data_path")
 flags.DEFINE_string(name="pretrained_tokens", default="", help="Pretrained tokens embeddings path")
 flags.DEFINE_integer(name="lemmatizer_embedding_dim", default=300,
                      help="Lemmatizer embeddings dim")
 flags.DEFINE_integer(name="num_epochs", default=400,
                      help="Epochs num")
-flags.DEFINE_integer(name="word_batch_size", default=2500,
-                     help="Minimum words in batch")
+flags.DEFINE_integer(name="batch_size", default=256,
+                     help="Batch size")
+flags.DEFINE_integer(name="batches_per_epoch", default=16, help="Number of batches per epoch")
 flags.DEFINE_string(name="pretrained_transformer_name", default="",
                     help="Pretrained transformer model name (see transformers from HuggingFace library for list of "
                          "available models) for transformers based embeddings.")
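For context, combo/main.py declares its CLI with absl.flags, so the new batch_size and batches_per_epoch flags above are parsed from the command line like any other absl flag. A minimal, self-contained sketch of that behavior (only the two flag definitions come from this diff; the entry point and example invocation are illustrative):

# Minimal absl.flags sketch. Only the two DEFINE_integer calls are taken from
# the diff; the main() entry point and invocation below are illustrative.
from absl import app, flags

FLAGS = flags.FLAGS

flags.DEFINE_integer(name="batch_size", default=256, help="Batch size")
flags.DEFINE_integer(name="batches_per_epoch", default=16, help="Number of batches per epoch")


def main(argv):
    # e.g. `python demo.py --batch_size=128 --batches_per_epoch=32`
    print(FLAGS.batch_size, FLAGS.batches_per_epoch)


if __name__ == "__main__":
    app.run(main)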
@@ -90,8 +90,6 @@ flags.DEFINE_string(name="input_file", default=None,
                     help="File to predict path")
 flags.DEFINE_boolean(name="conllu_format", default=True,
                      help="Prediction based on conllu format (instead of raw text).")
-flags.DEFINE_integer(name="batch_size", default=1,
-                     help="Prediction batch size.")
 flags.DEFINE_boolean(name="silent", default=True,
                      help="Silent prediction to file (without printing to console).")
 flags.DEFINE_boolean(name="finetuning", default=False,
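The prediction-time batch_size flag (default 1) is dropped here, presumably because batch_size is now defined once among the training flags in the earlier hunk: absl.flags refuses to register the same flag name twice in one process. A small illustration of that constraint (not code from combo):

# Illustration only: a second DEFINE_* call for an already registered flag name
# raises flags.DuplicateFlagError, so the duplicate definition cannot stay.
from absl import flags

flags.DEFINE_integer(name="batch_size", default=256, help="Batch size")
try:
    flags.DEFINE_integer(name="batch_size", default=1, help="Prediction batch size.")
except flags.DuplicateFlagError as err:
    print(err)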
@@ -305,6 +303,8 @@ def _get_ext_vars(finetuning: bool = False) -> Dict:
         },
         "data_loader": {
             "data_path": (",".join(FLAGS.training_data_path if not finetuning else FLAGS.finetuning_training_data_path)),
+            "batch_size": FLAGS.batch_size,
+            "batches_per_epoch": FLAGS.batches_per_epoch,
             "parameters": {
                 "reader": {
                     "parameters": {
@@ -323,6 +323,8 @@ def _get_ext_vars(finetuning: bool = False) -> Dict:
         },
         "validation_data_loader": {
             "data_path": (",".join(FLAGS.validation_data_path if not finetuning else FLAGS.finetuning_validation_data_path)),
+            "batch_size": FLAGS.batch_size,
+            "batches_per_epoch": FLAGS.batches_per_epoch,
             "parameters": {
                 "reader": {
                     "parameters": {
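Both the training and validation loaders now read batch_size and batches_per_epoch from the same FLAGS values, and the nested shape of these entries mirrors the configuration tree they override. As a hedged illustration of how such an overrides dict could be merged over a base configuration (the helper and the base dict below are hypothetical, not code from combo):

# Hypothetical deep merge of a nested overrides dict into a base config dict.
from typing import Any, Dict


def deep_merge(base: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]:
    merged = dict(base)
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged


base = {"data_loader": {"batch_size": 32, "shuffle": True}}
overrides = {"data_loader": {"batch_size": 256, "batches_per_epoch": 16}}
print(deep_merge(base, overrides))
# {'data_loader': {'batch_size': 256, 'shuffle': True, 'batches_per_epoch': 16}}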
@@ -351,13 +353,7 @@ def _get_ext_vars(finetuning: bool = False) -> Dict:
                     }
                 }
             }
-        },
-        "vocabulary": {
-            "parameters": {
-                "pretrained_files": FLAGS.pretrained_tokens
-            }
-        },
-        "word_batch_size": int(FLAGS.word_batch_size),
+        }
     }

     return to_override