In [9]:
import nltk

# Tokenizer and part-of-speech tagging helpers from NLTK
from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag, map_tag
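Note: the cells below assume the tokenizer and tagger models are already installed. If they are not, a one-time download along these lines is needed (the exact resource names can vary slightly between NLTK releases):

In [ ]:
# One-time downloads; resource names may differ across NLTK versions
nltk.download('punkt')                        # tokenizer models used by word_tokenize
nltk.download('averaged_perceptron_tagger')   # default English POS tagger
nltk.download('universal_tagset')             # mappings used by tagset='universal'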
In [6]:
# Tokenize the sentence into a list of word strings
text = word_tokenize("And now for something completely different")
print(text)
print(type(text))

# Tag each token with its Penn Treebank part-of-speech tag
nltk.pos_tag(text)
Out[6]:
In [14]:
# Tag with the coarser universal tagset (NOUN, VERB, ADJ, ...) instead of Penn Treebank tags
pos_tag(word_tokenize("John's big idea is not all that bad."), tagset='universal')
Out[14]:
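map_tag is imported above but never called in these cells; as a minimal sketch, it maps an individual Penn Treebank tag to its universal-tagset equivalent ('en-ptb' is NLTK's identifier for the Penn Treebank source tagset):

In [ ]:
# Map single Penn Treebank tags to universal-tagset categories
print(map_tag('en-ptb', 'universal', 'NNS'))   # NOUN
print(map_tag('en-ptb', 'universal', 'VBZ'))   # VERB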