In [1]:
import torch
from torch.utils.model_zoo import load_url
from seq2seq.tools.inference import Translator
In [2]:
# download the pretrained English-to-German Transformer checkpoint and map it onto the CPU
checkpoint = load_url('https://dl.dropboxusercontent.com/s/wdmyxh8etn2dj61/transformer_en_de-d4bd08ed.pth',
                      map_location=lambda storage, loc: storage)
# beam_size=1 is greedy decoding; length_normalization_factor rescales hypothesis scores by length
model = Translator(checkpoint=checkpoint,
                   beam_size=1,
                   length_normalization_factor=0.6)
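With the checkpoint already loaded, the Translator can be re-created with a wider beam to trade decoding speed for quality. A minimal sketch, assuming only the keyword arguments shown in the cell above (the beam size of 5 is an illustrative choice, not taken from the original run):

# illustrative: a larger beam explores more hypotheses per step than greedy decoding
model_beam = Translator(checkpoint=checkpoint,
                        beam_size=5,
                        length_normalization_factor=0.6)
model_beam.translate('hello world')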
In [3]:
model.translate('hello world')
Out[3]:
In [4]:
model.translate('This seem to be working good')
Out[4]:
In [5]:
model.translate('may the force be with you')
Out[5]:
In [6]:
model.translate("good morning to you all!")
Out[6]:
In [7]:
model.translate("I don't speak German")
Out[7]:
In [8]:
model.translate("those are some silly mistakes!")
Out[8]:
In [9]:
model.translate(["what is your favourite color?", "What is the airspeed velocity of an unladen Swallow?"])
Out[9]:
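As the last cell shows, translate also accepts a list of sentences; it presumably returns one translation per input, so sources and hypotheses can be paired up. A small usage sketch (the German outputs are not reproduced here):

# hypothetical usage: pair each source sentence with its translation
sentences = ["what is your favourite color?",
             "What is the airspeed velocity of an unladen Swallow?"]
for src, hyp in zip(sentences, model.translate(sentences)):
    print(src, '->', hyp)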