In [2]:
from nltk.stem import PorterStemmer
In [3]:
# Morphological variants of "python" used to demonstrate Porter stemming.
example_words = [
    "python",
    "pythoner",
    "pythoning",
    "pythoned",
    "pythonly",
]
In [4]:
# Single shared Porter stemmer instance, reused by the loops below.
ps = PorterStemmer()
In [6]:
# Show the Porter stem of every example word, one per line.
print("\n".join(ps.stem(word) for word in example_words))
In [7]:
from nltk.tokenize import word_tokenize
In [9]:
# Sample sentence for tokenize-then-stem demo. Kept verbatim (including the
# "to by" typo and the invented "python*" words) so stemmer output is stable.
new_text = "It is important to by very pythonly while you are pythoning with python. All pythoners have pythoned poorly at least once."
In [10]:
# Split the sample text into word/punctuation tokens (NLTK's Punkt-based tokenizer).
words = word_tokenize(new_text)
In [11]:
# Print the Porter stem of each token from the sample sentence.
# Note: punctuation tokens (".") pass through the stemmer unchanged.
for token in words:
    print(ps.stem(token))
In [ ]: