In [1]:
import torch
from seq2seq.tools.inference import Translator
from torch.utils.model_zoo import load_url
In [2]:
# Download the pretrained seq2seq LSTM checkpoint (model_zoo caches it locally
# after the first run) and map every tensor storage onto the CPU.
# map_location='cpu' is the documented equivalent of the legacy identity
# callable `lambda storage, loc: storage` (see torch.load docs) and states the
# intent directly.
checkpoint = load_url('https://dl.dropboxusercontent.com/s/x7no5myh06xs5fu/lstm-b67c3edb.pth?dl=0',
                      map_location='cpu')
# Build a beam-search translator from the checkpoint.
#   beam_size=8                   -- keep the 8 best partial hypotheses per decoding step
#   length_normalization_factor=1 -- normalize hypothesis scores by length
#                                    (presumably so longer outputs are not penalized
#                                     — confirm against Translator's docs)
#   cuda=False                    -- run inference on CPU, consistent with the
#                                    CPU map_location above
model = Translator(checkpoint=checkpoint,
                   beam_size=8,
                   length_normalization_factor=1,
                   cuda=False)
In [3]:
# Smoke test: translate a single short sentence. The bare expression is the
# cell's displayed output (the translation appears in the Out[] below).
model.translate('hello world')
Out[3]:
In [4]:
# A full declarative sentence, exercising a longer input than the smoke test.
model.translate('This seems to be working good')
Out[4]:
In [5]:
# Idiomatic / quotation-style input with no capitalization.
model.translate('may the force be with you')
Out[5]:
In [6]:
model.translate("good morning to you all!")
Out[6]:
In [7]:
model.translate("I don't speak German")
Out[7]:
In [8]:
model.translate("those are some silly mistakes!")
Out[8]:
In [9]:
model.translate(["what is your favourite color?", "What is the airspeed velocity of an unladen Swallow?"])
Out[9]: