# Phase01-Part01.py — Persian text preprocessing pipeline (normalize,
# tokenize, stopword removal, stemming/lemmatization) using parsivar and hazm.
import sys
import parsivar
import json
import hazm
from hazm import Stemmer, Lemmatizer
from parsivar import Normalizer, Tokenizer
from hazm import stopwords_list
def readtext():
    """Read the stopword list from 'stopwords.txt' in the working directory.

    Returns:
        list[str]: the file's lines with line terminators stripped
        (empty list for an empty file).
    """
    # The 'with' statement closes the file automatically; the original's
    # explicit f.close() inside the with-block was redundant.
    with open('stopwords.txt', 'r', encoding='utf-8') as f:
        return f.read().splitlines()
def normalize(object):
    """Normalize Persian text with parsivar, converting Pinglish
    (Latin-script Persian) to Persian script as well.
    """
    return Normalizer(pinglish_conversion_needed=True).normalize(object)
def tokenize(object):
    """Split normalized text into a list of word tokens using parsivar."""
    return Tokenizer().tokenize_words(object)
def stopword(object):
    """Filter stopwords (loaded from 'stopwords.txt') out of a token list.

    Bug fixes versus the original:
    - It removed items from the list while iterating over it, which skips
      the element following each removal, so consecutive stopwords were
      left in the output.
    - It called readtext() (re-reading the file) once per token; the
      stopword set is now loaded exactly once.

    Returns a new filtered list instead of mutating the input in place;
    visible callers only use the return value.
    """
    stops = set(readtext())  # one file read, O(1) membership tests
    return [tok for tok in object if tok not in stops]
def stemmerandlemmatizer(object):
    """Replace each token, in place, with the lemma of its stem (hazm),
    and return the same list.
    """
    stemmer = Stemmer()
    lemmatizer = Lemmatizer()
    for idx, token in enumerate(object):
        # Stem first, then lemmatize the stemmed form — same order as before.
        object[idx] = lemmatizer.lemmatize(stemmer.stem(token))
    return object
def test():
    """Demo of the full pipeline: normalize -> tokenize -> remove
    stopwords -> stem/lemmatize, printing each stage's result.
    """
    text = "من به تاریخ ۱۱ شهریور به دانشگاه می روم. man be tarikhe 11 shahrivar be danshgah miravam"
    # Name each intermediate instead of recomputing the chain per print.
    normalized = normalize(text)
    print("Normal shode : ", normalized)
    tokens = tokenize(normalized)
    print("Token haye be dast amade : ", tokens)
    filtered = stopword(tokens)
    print("Hazf kalamate por tekrar : ", filtered)
    print("Hazf kalamate ba rishe yeksan : ", stemmerandlemmatizer(filtered))
test()