summary.py
'''
This script uses natural language processing via the nltk module to summarize
news articles. These summaries are more relevant and useful to the topic at
hand than the newspaper3k summaries, since newspaper3k's built-in summarizer
produces less focused results for these articles.
'''
# importing libraries
import nltk

nltk.download("stopwords")
nltk.download("punkt")

from nltk.corpus import stopwords
from nltk.tokenize import sent_tokenize, word_tokenize


def summarize_text(text) -> str:
    # Tokenizing the text
    stopWords = set(stopwords.words("english"))
    words = word_tokenize(text)

    # Creating a frequency table to keep the score of each word
    freqTable = dict()
    for word in words:
        word = word.lower()
        if word in stopWords:
            continue
        if word in freqTable:
            freqTable[word] += 1
        else:
            freqTable[word] = 1

    # Creating a dictionary to keep the score of each sentence
    sentences = sent_tokenize(text)
    sentenceValue = dict()
    for sentence in sentences:
        for word, freq in freqTable.items():
            if word in sentence.lower():
                if sentence in sentenceValue:
                    sentenceValue[sentence] += freq
                else:
                    sentenceValue[sentence] = freq

    sumValues = 0
    for sentence in sentenceValue:
        sumValues += sentenceValue[sentence]

    # Average value of a sentence from the original text
    if len(sentenceValue) == 0:
        return ''
    average = int(sumValues / len(sentenceValue))

    # Storing sentences into our summary.
    summary = ''
    for sentence in sentences:
        # Only keep sentences scoring above 1.5 * average to create relatively short summaries
        if (sentence in sentenceValue) and (sentenceValue[sentence] > (1.5 * average)) and (not sentence.isupper()):
            summary += " " + sentence

    if len(summary) == 0:
        return text
    return summary
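

# A minimal usage sketch: running the module directly summarizes a short
# sample article and prints the result. The sample text below is a
# hypothetical illustration, not part of the original script.
if __name__ == "__main__":
    sample_article = (
        "The city council met on Tuesday to discuss the new transit plan. "
        "Officials said the plan would add three bus routes by next year. "
        "Critics argued that the funding estimates were overly optimistic. "
        "The council is expected to vote on the proposal next month."
    )
    print(summarize_text(sample_article))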