# cwi_makeModel.py
import operator
import codecs
import re
from collections import Counter
import math
import pickle
from os.path import isfile, join
from nltk.corpus import stopwords
import cwi_readDataset
from nltk.tag.stanford import StanfordNERTagger
import nltk
import os
stop = set(stopwords.words('english'))  # English stop words filtered out of the unigram model

# Stanford NER setup: JAVAHOME must point at a local JDK installation.
os.environ['JAVAHOME'] = "C:/Program Files/Java/jdk1.8.0_65/bin"
classifier = './stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz'
jar = './stanford-ner/stanford-ner.jar'
st = StanfordNERTagger(classifier, jar, encoding='utf-8')

# Rows collected by output() as [word, value] pairs.
data = []


def spasi(number):
    # Return `number` spaces, used to pad words to a fixed column width.
    return " " * number


def output(items):
    # Pretty-print (word, value) pairs in aligned columns and record them in `data`.
    for (word, value) in items:
        data.append([word, value])
        if len(word) < 30:
            print(word + spasi(30 - len(word)) + ": " + str(value))
        else:
            print(word + ": " + str(value))


def create_Model(inputFile, outputFile, log=False):
    # Build a unigram model: relative frequency of each non-stopword
    # (or its natural log when log=True), pickled to outputFile.
    # Returns the (word, value) pairs sorted by value, descending.
    fd = codecs.open(inputFile, 'r', 'utf-8')
    text = fd.read()
    fd.close()
    all_words = re.findall(r'[a-z]+', text.lower())
    words = [w for w in all_words if w not in stop]
    word_count = Counter(words)
    total_words = len(words)
    indices = {}
    for w in word_count:
        if log:
            indices[w] = math.log(word_count[w] / total_words)
        else:
            indices[w] = word_count[w] / total_words
    sorted_indices = sorted(indices.items(), key=operator.itemgetter(1), reverse=True)
    model = dict(sorted_indices)
    with open(outputFile, 'wb') as f:
        pickle.dump(model, f)
    if isfile(outputFile):
        print("File", outputFile, "created")
    return sorted_indices
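

# Usage sketch (assumption): build a unigram model from a corpus file and print the
# top entries. "simple.txt" and "unigram_simple.bin" are illustrative filenames, not
# files that ship with this repository.
# unigram = create_Model("simple.txt", "unigram_simple.bin", log=True)
# output(unigram[:20])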


def bigrams(input_list):
    # Join each pair of adjacent tokens into a space-separated bigram string.
    bigram_list = []
    j = len(input_list) - 1  # index of the last token, printed as a crude progress indicator
    for i in range(len(input_list) - 1):
        bigram_list.append(input_list[i] + " " + input_list[i + 1])
        print(i, "->", j)
    return bigram_list
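

# Example (sketch): bigrams(["the", "quick", "brown"]) returns ["the quick", "quick brown"].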


def create_ModelBigram(inputFile, outputFile, log=False):
    # Build a bigram model: raw count of each adjacent word pair
    # (or its natural log when log=True), pickled to outputFile.
    fd = codecs.open(inputFile, 'r', 'utf-8')
    text = fd.read()
    fd.close()
    all_words = re.findall(r'[a-z]+', text.lower())
    # words = [w for w in all_words if w not in stop]
    words = bigrams(all_words)
    word_count = Counter(words)
    print(word_count)
    indices = {}
    for w in word_count:
        if log:
            indices[w] = math.log(word_count[w])
        else:
            indices[w] = word_count[w]
    with open(outputFile, 'wb') as f:
        pickle.dump(indices, f)
    if isfile(outputFile):
        print("File", outputFile, "created")
    return indices


def datasetNER(outputFile):
    # Run the Stanford NER tagger over the NNSeval sentences and pickle rows of
    # [token, '1' if the token index matches line[1][1] else '0', NER label].
    dataset = cwi_readDataset.readNNSeval()
    rows = []
    for line in dataset:
        print(line)
        text = line[0]
        text = st.tag(text.split())
        i = 0
        for line2 in text:
            line2 = list(line2)
            line2[0] = line2[0].lower()
            if re.sub(r'[^a-z]', '', line2[0]):
                if i == int(line[1][1]):
                    rows.append([line2[0], '1', line2[1]])
                else:
                    rows.append([line2[0], '0', line2[1]])
                i += 1
    with open(outputFile, 'wb') as f:
        pickle.dump(rows, f)
    if isfile(outputFile):
        print("File", outputFile, "created")


def loadDatasetNER(fileName):
    # Load the pickled NER-tagged dataset written by datasetNER().
    if isfile(fileName):
        print("File Loaded")
        with open(fileName, 'rb') as f:
            s = pickle.load(f)
        return s
    else:
        print("File not found")
        return 0
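

# Usage sketch (assumption): tag the NNSeval sentences once, then reload the pickled
# rows later without re-running the (slow) Stanford tagger. "ner_nnseval.bin" is an
# illustrative filename, not one used elsewhere in this repository.
# datasetNER("ner_nnseval.bin")
# for token, is_target, ner_label in loadDatasetNER("ner_nnseval.bin"):
#     print(token, is_target, ner_label)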


def loadModel(fileName):
    # Load a pickled model written by create_Model or create_ModelBigram.
    if isfile(fileName):
        print("File Loaded")
        with open(fileName, 'rb') as f:
            s = pickle.load(f)
        return s
    else:
        print("File not found")
        return 0


# Build the bigram model for the "normal" corpus when this script is run.
create_ModelBigram("normal.txt", 'bigram_normal.txt', log=True)

# for k, v in loadModel("bigram_simple.txt").items():
#     if v > 4:
#         print(k, v)
# print(loadModel("bigram_simple.0txt")["even out"])