fenci.py
# -*- coding: UTF-8 -*-
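"""Chinese word segmentation over the Sogou lab dictionary (SogouLabDic.dic).

Implements forward maximum matching (maxmatch), backward maximum matching
(maxmatch_back) and a simplified MMSEG-style segmenter (mmseg) that resolves
ambiguous chunks by total length, average word length, length variance and
cumulative word frequency.  Call init() to load the dictionary before
segmenting.
"""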

import sys

# Word-frequency dictionary shared by all segmentation functions; init() fills it.
d = {}

def init(filename="SogouLabDic.dic"):
    # Load the dictionary: each line starts with "word<TAB>frequency".
    with open(filename, 'r', encoding='gbk', errors='ignore') as f:
        for line in f:
            word, freq = line.split('\t')[0:2]
            try:
                d[word] = int(freq)
            except ValueError:
                # Keep the word even if the frequency field is malformed.
                d[word] = 0

def maxmatch(s):
    # Forward maximum matching: at each position take the longest dictionary
    # word (at most maxlen characters); otherwise skip a single character.
    maxlen = 5
    l = len(s)
    p = 0
    result = {}
    while p < l:
        length = min(maxlen, l - p)
        wlen = 1
        for i in range(length, 0, -1):
            if s[p:p+i] in d:
                wlen = i
                break
        if wlen > 1:
            result.setdefault(s[p:p+wlen], 0)
            result[s[p:p+wlen]] += 1
        p += wlen
    return result

def maxmatch_back(s):
    # Backward maximum matching: scan from the end of the string and take the
    # longest dictionary word ending at the current position.
    maxlen = 5
    l = len(s)
    result = {}
    while l > 0:
        length = min(maxlen, l)
        wlen = 1
        for i in range(length, 0, -1):
            if s[l-i:l] in d:
                wlen = i
                break
        if wlen > 1:
            result.setdefault(s[l-wlen:l], 0)
            result[s[l-wlen:l]] += 1
        l -= wlen
    return result

def one_word(s, start, rest=3):
    # Extend each partial segmentation in `start` (a list of position lists)
    # by one more word: every dictionary word beginning at its last position,
    # or a single character if none matches.  Recurse until `rest` words have
    # been added.
    result = []
    maxlen = 5
    l = len(s)
    for former in start:
        p = former[-1]
        if p >= l:
            # This candidate already reached the end of the string; keep it
            # and move on to the remaining candidates.
            result.append(former)
            continue
        length = min(maxlen, l - p)
        num = 0
        for i in range(1, length + 1):
            if s[p:p+i] in d:
                result.append(former + [p + i])
                num += 1
        if num == 0:
            result.append(former + [p + 1])
    if rest > 1:
        return one_word(s, result, rest - 1)
    return result
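
# three_word_chunk builds every chunk of up to three words starting at
# position `start` (i.e. it looks ahead two more words beyond the first) and
# selects one chunk with MMSEG-style rules applied in order until a single
# candidate remains.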
def three_word_chunk(s, start):
    result = one_word(s, [[start]], 3)
    # Rule 1: greatest total chunk length.
    longest = 0
    lset = []
    for i in range(len(result)):
        cur = result[i][-1] - result[i][0]
        if cur > longest:
            longest = cur
            lset = [i]
        elif cur == longest:
            lset.append(i)
    if len(lset) == 1:
        return result[lset[0]]
    # Rule 2: greatest average word length.
    longavg = 0
    lavg = []
    for i in range(len(lset)):
        cur = longest / (len(result[lset[i]]) - 1)
        if cur > longavg:
            longavg = cur
            lavg = [lset[i]]
        elif cur == longavg:
            lavg.append(lset[i])
    lset = lavg
    longest = longavg  # reuse as the mean word length for the variance rule
    if len(lset) == 1:
        return result[lset[0]]
    # Rule 3: smallest variance of word lengths around that mean.
    mindk = sys.maxsize
    dkset = []
    for i in range(len(lset)):
        cur = 0
        for j in range(1, len(result[lset[i]])):
            wordlen = result[lset[i]][j] - result[lset[i]][j-1]
            cur += pow(wordlen - longest, 2)
        if cur < mindk:
            mindk = cur
            dkset = [lset[i]]
        elif cur == mindk:
            dkset.append(lset[i])
    lset = dkset
    if len(lset) == 1:
        return result[lset[0]]
    # Rule 4: greatest cumulative frequency of the words in the chunk.
    maxFre = 0
    fset = []
    for i in range(len(lset)):
        cur = 0
        for j in range(1, len(result[lset[i]])):
            key = s[result[lset[i]][j-1]:result[lset[i]][j]]
            if key in d:
                cur += d[key]
        if cur > maxFre:
            maxFre = cur
            fset = [lset[i]]
        elif cur == maxFre:
            fset.append(lset[i])
    lset = fset
    if len(lset) == 1:
        return result[lset[0]]
    # Still tied after all four rules: fall back to the first candidate.
    return result[lset[0]]

def mmseg(s):
    # Simplified MMSEG: repeatedly pick the best three-word chunk at the
    # current position, emit its first word when it is longer than one
    # character, and advance past that word.
    l = len(s)
    p = 0
    result = {}
    while p < l:
        chunk = three_word_chunk(s, p)
        if len(chunk) < 2:
            break
        if chunk[1] - chunk[0] > 1:
            result.setdefault(s[chunk[0]:chunk[1]], 0)
            result[s[chunk[0]:chunk[1]]] += 1
        p = chunk[1]
    return result

def solve(s, segment=maxmatch):
    # Segment `s` with the chosen strategy; UTF-8 encoded bytes are accepted
    # and decoded for backwards compatibility.
    if isinstance(s, bytes):
        s = s.decode("utf8")
    return segment(s)
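
# A minimal usage sketch, assuming a GBK-encoded "SogouLabDic.dic" is present
# in the working directory; the sample sentence is only an illustration.
if __name__ == "__main__":
    init()  # load the word-frequency dictionary once
    sentence = "研究生命的起源"  # hypothetical test input
    print("maxmatch:     ", solve(sentence, maxmatch))
    print("maxmatch_back:", solve(sentence, maxmatch_back))
    print("mmseg:        ", solve(sentence, mmseg))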