Commit cf970d17 authored by Piotr Miłkowski's avatar Piotr Miłkowski

Merge branch 'feature-257' into 'developer'

Feature 257

See merge request !1
parents 2ca1a0db af6c50ce
Pipeline #2818 passed with stage in 24 seconds
.idea
/idea
log.txt
__pycache__
FROM clarinpl/python:3.8

WORKDIR /home/worker
COPY ./src ./src
COPY ./main.py .
COPY ./requirements.txt .
COPY ./config.ini .

RUN apt-get update && apt-get install -y build-essential libffi-dev
RUN pip install --index-url https://pypi.clarin-pl.eu/simple/ -r requirements.txt

# Pre-trained fastText language-identification model (176 languages)
RUN wget https://dl.fbaipublicfiles.com/fasttext/supervised-models/lid.176.bin

CMD ["python3.8", "main.py"]
# predict-lang

Language detection worker. `PredictLangWorker` detects the language of an input text using fastText's `lid.176.bin` model.

## Docker

Use Docker to build and run the image:
```
docker build -t <name> .
docker run -v /samba:/samba <name>
```
[service]
tool = predictLang
root = /samba/requests/
rabbit_host = 10.17.0.85
rabbit_user = clarin
rabbit_password = clarin123
[tool]
workers_number = 1
[logging]
port = 9980
local_log_level = INFO
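These settings are presumably consumed by `nlp_ws` at startup. As a minimal sketch of what the fields map to (using only the standard-library `configparser`; the real parsing happens inside `nlp_ws`, so this is purely illustrative):
```
# Illustrative only: read config.ini with the standard library to show the fields above.
import configparser

config = configparser.ConfigParser()
config.read("config.ini")

rabbit_host = config["service"]["rabbit_host"]        # e.g. 10.17.0.85
workers_number = config.getint("tool", "workers_number")
log_level = config["logging"]["local_log_level"]      # e.g. INFO

print(rabbit_host, workers_number, log_level)
```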
import nlp_ws

from src.predict_lang import PredictLangWorker

if __name__ == '__main__':
    nlp_ws.NLPService.main(PredictLangWorker)
import fasttext
import nlp_ws


class PredictLangWorker(nlp_ws.NLPWorker):
    """Worker that detects the language of a text with fastText."""

    def __init__(self):
        # Pre-trained language-identification model downloaded in the Dockerfile.
        self.model = fasttext.load_model("lid.176.bin")

    def process(self, input_file: str,
                task_options: dict,
                output_file: str) -> None:
        # fastText expects a single line, so newlines are replaced with spaces.
        with open(input_file, "r") as f:
            text = f.read().replace('\n', ' ')
        # predict() returns (labels, probabilities); take the top label,
        # e.g. '__label__en'.
        predict = self.model.predict(text)[0][0]
        with open(output_file, "w") as f:
            f.write(text + predict)
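The top label returned by `model.predict` has the form `__label__xx` (for example `__label__en` or `__label__pl`), and that label is what gets appended to the text in the output file. A minimal sketch of exercising the worker locally, bypassing the RabbitMQ queue (the file names and sample sentence are illustrative assumptions; `lid.176.bin` must be present in the working directory):
```
# Illustrative local test of PredictLangWorker without nlp_ws/RabbitMQ.
from src.predict_lang import PredictLangWorker

worker = PredictLangWorker()

with open("sample_input.txt", "w") as f:
    f.write("To jest przykładowe zdanie po polsku.")

worker.process("sample_input.txt", {}, "sample_output.txt")

with open("sample_output.txt") as f:
    print(f.read())  # the text followed by a label such as '__label__pl'
```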