diff --git a/combo/combo_model.py b/combo/combo_model.py
index d87d514adeddb223bec672e43d664d347f97cf82..162a3b18522daeae810cbc8ecf930fc0ad1b4972 100644
--- a/combo/combo_model.py
+++ b/combo/combo_model.py
@@ -24,7 +24,7 @@ from combo.nn.utils import get_text_field_mask
 from combo.predictors import Predictor
 from combo.utils import metrics
 from combo.utils import ConfigurationError
-from modules.seq2seq_encoders.transformer_encoder import TransformerEncoder
+from combo.modules.seq2seq_encoders.transformer_encoder import TransformerEncoder
 
 
 @Registry.register("semantic_multitask")
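The import fix above matters once `combo` is imported as an installed package: the bare `modules....` path only resolves when the `combo/` directory itself happens to be on `sys.path`. A minimal sketch of the failure mode, assuming a standard packaged layout (e.g. `pip install -e .`), with module names taken from this repo:

```python
# Sketch: why the package-qualified import is needed. With combo installed
# as a package, only its parent directory is on sys.path, so the bare path
# fails while the qualified one resolves.
import importlib

importlib.import_module(
    "combo.modules.seq2seq_encoders.transformer_encoder")  # resolves

try:
    importlib.import_module(
        "modules.seq2seq_encoders.transformer_encoder")    # bare path
except ModuleNotFoundError as err:
    print(f"bare import fails outside the source tree: {err}")
```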
diff --git a/combo/main.py b/combo/main.py
index 79e98208ce36c59df835363480dfab6e402e0f10..73b3a26ef2bfaafdf328fbfbfabc8ca96e887eef 100755
--- a/combo/main.py
+++ b/combo/main.py
@@ -4,8 +4,10 @@ import os
 import pathlib
 import tempfile
 from itertools import chain
+import random
 from typing import Dict, Optional, Any, Tuple
 
+import numpy
 import torch
 from absl import app, flags
 import pytorch_lightning as pl
@@ -47,6 +49,7 @@ flags.DEFINE_integer(name="n_cuda_devices", default=-1,
                      help="Number of devices to train on (default -1 auto mode - train on as many as possible)")
 flags.DEFINE_string(name="output_file", default="output.log",
                     help="Predictions result file.")
+flags.DEFINE_integer(name="seed", default=None, help="Random seed.")
 
 # Training flags
 flags.DEFINE_string(name="training_data_path", default="", help="Training data path(s)")
@@ -293,7 +296,17 @@ def read_model_from_config(logging_prefix: str) -> Optional[
     return model, dataset_reader, training_data_loader, validation_data_loader, vocabulary
 
 
+def set_seed(seed: int) -> None:
+    random.seed(seed)
+    numpy.random.seed(seed)
+    torch.manual_seed(seed)
+
+
 def run(_):
+
+    if FLAGS.seed is not None:
+        set_seed(FLAGS.seed)
+
     if FLAGS.mode == 'train':
         model, dataset_reader, training_data_loader, validation_data_loader, vocabulary = None, None, None, None, None
 
diff --git a/combo/modules/seq2seq_encoders/transformer_encoder.py b/combo/modules/seq2seq_encoders/transformer_encoder.py
index a49100def51df028bd8427ea65e0ab8bd818905a..3b126fac38009a82cfbe3efa259c7cd2c6a4a2af 100644
--- a/combo/modules/seq2seq_encoders/transformer_encoder.py
+++ b/combo/modules/seq2seq_encoders/transformer_encoder.py
@@ -1,18 +1,11 @@
 from typing import Optional
 
-from overrides import overrides
 import torch
 from torch import nn
 
 from combo.modules.encoder import _EncoderBase
 from combo.config.from_parameters import FromParameters, register_arguments
-
-# from modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder
-from nn.utils import add_positional_features
-
-
-# from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder
-# from allennlp.nn.util import add_positional_features
+from combo.nn.utils import add_positional_features
 
 
 class TransformerEncoder(_EncoderBase, FromParameters):
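The re-pointed `add_positional_features` import is the helper this encoder uses for position information. A minimal usage sketch, assuming the function keeps the interface of the AllenNLP utility it was adapted from: it adds fixed sinusoidal signals to a `(batch, timesteps, dim)` tensor and returns a tensor of the same shape.

```python
import torch
from combo.nn.utils import add_positional_features

x = torch.zeros(2, 7, 16)          # (batch, timesteps, hidden_dim)
y = add_positional_features(x)     # same shape, sinusoidal signal added
assert y.shape == x.shape
assert not torch.equal(y, x)       # positions 0..6 now differ along dim 1
```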