"""Visualizing Twitter Sentiment Across America"""
import string
from data import word_sentiments, load_tweets
from geo import us_states, geo_distance, make_position, longitude, latitude
from maps import draw_state, draw_name, draw_dot, wait
from ucb import main, trace, interact, log_current_line
# Phase 1: The feelings in tweets
def make_tweet(text, time, lat, lon):
"""Return a tweet, represented as a python dictionary.
text -- A string; the text of the tweet, all in lowercase
time -- A datetime object; the time that the tweet was posted
lat -- A number; the latitude of the tweet's location
lon -- A number; the longitude of the tweet's location
"""
return {'text': text, 'time': time, 'latitude': lat, 'longitude': lon}
def tweet_words(tweet):
"""Return a list of words in the tweet.
Arguments:
tweet -- a tweet abstract data type.
Return 1 value:
- The list of words in the tweet.
"""
return extract_words(tweet['text'])
def tweet_location(tweet):
"""Return a position (see geo.py) that represents the tweet's location."""
    return make_position(tweet['latitude'], tweet['longitude'])
def tweet_string(tweet):
"""Return a string representing the tweet."""
return '"{0}" @ {1}'.format(tweet['text'], tweet_location(tweet))
def extract_words(text):
"""Return the words in a tweet, not including punctuation.
>>> extract_words('anything else.....not my job')
['anything', 'else', 'not', 'my', 'job']
>>> extract_words('i love my job. #winning')
['i', 'love', 'my', 'job', 'winning']
>>> extract_words('make justin # 1 by tweeting #vma #justinbieber :)')
['make', 'justin', 'by', 'tweeting', 'vma', 'justinbieber']
>>> extract_words("paperclips! they're so awesome, cool, & useful!")
['paperclips', 'they', 're', 'so', 'awesome', 'cool', 'useful']
"""
s = ""
c = ''
for i in text:
if i not in string.ascii_letters:
i = ' '
s += i
return s.split()
def get_word_sentiment(word):
"""Return a number between -1 and +1 representing the degree of positive or
negative feeling in the given word.
Return None if the word is not in the sentiment dictionary.
(0 represents a neutral feeling, not an unknown feeling.)
>>> get_word_sentiment('good')
0.875
>>> get_word_sentiment('bad')
-0.625
>>> get_word_sentiment('winning')
0.5
>>> get_word_sentiment('Berkeley') # Returns None
"""
return word_sentiments.get(word, None)
def analyze_tweet_sentiment(tweet):
""" Return a number between -1 and +1 representing the degree of positive or
negative sentiment in the given tweet, averaging over all the words in the
tweet that have a sentiment score.
If there are words that don't have a sentiment score, leave them
out of the calculation.
If no words in the tweet have a sentiment score, return None.
(do not return 0, which represents neutral sentiment).
>>> positive = make_tweet('i love my job. #winning', None, 0, 0)
>>> round(analyze_tweet_sentiment(positive), 5)
0.29167
>>> negative = make_tweet("Thinking, 'I hate my job'", None, 0, 0)
>>> analyze_tweet_sentiment(negative)
-0.25
>>> no_sentiment = make_tweet("Go bears!", None, 0, 0)
>>> analyze_tweet_sentiment(no_sentiment)
"""
    total, count = 0, 0
    for word in tweet_words(tweet):
        score = get_word_sentiment(word)
        if score is not None:  # 0 is a valid (neutral) score, so test against None
            total += score
            count += 1
    if count == 0:  # no word in the tweet had a sentiment score
        return None
    return total / count
# @main
def print_sentiment(text='Are you virtuous or verminous?'):
"""Print the words in text, annotated by their sentiment scores.
For example, to print each word of a sentence with its sentiment:
# python3 trends.py "computer science is my favorite!"
"""
words = extract_words(text.lower())
assert words, 'No words extracted from "' + text + '"'
layout = '{0:>' + str(len(max(words, key=len))) + '}: {1}'
    for word in words:
print(layout.format(word, get_word_sentiment(word)))
# Phase 2: The geometry of maps
def find_centroid(polygon):
"""Find the centroid of a polygon.
http://en.wikipedia.org/wiki/Centroid#Centroid_of_polygon
polygon -- A list of positions, in which the first and last are the same
Returns: 3 numbers; centroid latitude, centroid longitude, and polygon area
Hint: If a polygon has 0 area, return its first position as its centroid
>>> p1, p2, p3 = make_position(1, 2), make_position(3, 4), make_position(5, 0)
>>> triangle = [p1, p2, p3, p1] # First vertex is also the last vertex
>>> find_centroid(triangle)
(3.0, 2.0, 6.0)
>>> find_centroid([p1, p3, p2, p1])
(3.0, 2.0, 6.0)
>>> find_centroid([p1, p2, p1])
(1, 2, 0)
"""
    lat, lon, area = 0, 0, 0
    for n in range(len(polygon) - 1):
        cross = (latitude(polygon[n]) * longitude(polygon[n+1])
                 - latitude(polygon[n+1]) * longitude(polygon[n]))
        lat += (latitude(polygon[n]) + latitude(polygon[n+1])) * cross
        lon += (longitude(polygon[n]) + longitude(polygon[n+1])) * cross
        area += cross
    area = area / 2
    if area == 0:
        # Degenerate polygon: use its first position as the centroid.
        return (latitude(polygon[0]), longitude(polygon[0]), 0)
    lat = lat / (6 * area)
    lon = lon / (6 * area)
    return (lat, lon, abs(area))
def find_center(shapes):
"""Compute the geographic center of a state, averaged over its shapes.
The center is the average position of centroids of the polygons in shapes,
weighted by the area of those polygons.
Arguments:
shapes -- a list of polygons
>>> ca = find_center(us_states['CA']) # California
>>> round(latitude(ca), 5)
37.25389
>>> round(longitude(ca), 5)
-119.61439
>>> hi = find_center(us_states['HI']) # Hawaii
>>> round(latitude(hi), 5)
20.1489
>>> round(longitude(hi), 5)
-156.21763
"""
x, y, area = 0, 0, 0
for i in shapes:
temp = find_centroid(i)
area += temp[2]
x += temp[0] * temp[2]
y += temp[1] * temp[2]
x /= area
y /= area
return (x, y)
# Uncomment this decorator during Phase 2.
# @main
def draw_centered_map(center_state='TX', n=10):
"""Draw the n states closest to center_state.
For example, to draw the 20 states closest to California (including California):
# python3 trends.py CA 20
"""
    us_centers = {name: find_center(shapes) for name, shapes in us_states.items()}
center = us_centers[center_state.upper()]
dist_from_center = lambda name: geo_distance(center, us_centers[name])
for name in sorted(us_states.keys(), key=dist_from_center)[:int(n)]:
draw_state(us_states[name])
draw_name(name, us_centers[name])
draw_dot(center, 1, 10) # Mark the center state with a red dot
wait()
# Phase 3: The mood of the nation
def find_closest_state(tweet, state_centers):
"""Return the name of the state closest to the given tweet's location.
Use the geo_distance function (already provided) to calculate distance
in miles between two latitude-longitude positions.
Arguments:
tweet -- a tweet abstract data type
    state_centers -- a dictionary from state names to state center positions
>>> us_centers = {n: find_center(s) for n, s in us_states.items()}
>>> sf = make_tweet("Welcome to San Francisco", None, 38, -122)
>>> ny = make_tweet("Welcome to New York", None, 41, -74)
>>> find_closest_state(sf, us_centers)
'CA'
>>> find_closest_state(ny, us_centers)
'NJ'
"""
    location = tweet_location(tweet)
    distance = lambda name: geo_distance(location, state_centers[name])
    return min(state_centers, key=distance)
def group_tweets_by_state(tweets):
"""Return a dictionary that aggregates tweets by their nearest state center.
The keys of the returned dictionary are state names, and the values are
lists of tweets that appear closer to that state center than any other.
tweets -- a sequence of tweet abstract data types
>>> sf = make_tweet("Welcome to San Francisco", None, 38, -122)
>>> ny = make_tweet("Welcome to New York", None, 41, -74)
>>> ca_tweets = group_tweets_by_state([sf, ny])['CA']
>>> tweet_string(ca_tweets[0])
'"Welcome to San Francisco" @ (38, -122)'
"""
    tweets_by_state = {}
    us_centers = {name: find_center(shapes) for name, shapes in us_states.items()}
    for tweet in tweets:
        state = find_closest_state(tweet, us_centers)
        tweets_by_state.setdefault(state, []).append(tweet)
    return tweets_by_state
def calculate_average_sentiments(tweets_by_state):
"""Calculate the average sentiment of the states by averaging over all
the tweets from each state. Return the result as a dictionary from state
names to average sentiment values.
    If a state has no tweets with sentiment values, leave it out of the
    dictionary entirely. Do not include states with no tweets, or whose tweets
    all lack sentiment scores, as 0; 0 represents neutral sentiment, not
    unknown sentiment.
tweets_by_state -- A dictionary from state names to lists of tweets
"""
    averaged_state_sentiments = {}
    for state, tweets in tweets_by_state.items():
        total, count = 0, 0
        for tweet in tweets:
            sentiment = analyze_tweet_sentiment(tweet)
            if sentiment is not None:
                total += sentiment
                count += 1
        if count > 0:  # skip states where no tweet had a sentiment score
            averaged_state_sentiments[state] = total / count
    return averaged_state_sentiments
def draw_state_sentiments(state_sentiments={}):
"""Draw all U.S. states in colors corresponding to their sentiment value.
Unknown state names are ignored; states without values are colored grey.
state_sentiments -- A dictionary from state strings to sentiment values
"""
for name, shapes in us_states.items():
sentiment = state_sentiments.get(name, None)
draw_state(shapes, sentiment)
for name, shapes in us_states.items():
center = find_center(shapes)
if center is not None:
draw_name(name, center)
# Uncomment this decorator during Phase 3.
@main
def draw_map_for_term(term='my job'):
"""
Draw the sentiment map corresponding to the tweets that match term.
term -- a word or phrase to filter the tweets by.
To visualize tweets containing the word "obama":
# python3 trends.py obama
Some term suggestions:
New York, Texas, sandwich, my life, justinbieber
"""
tweets = load_tweets(make_tweet, term)
tweets_by_state = group_tweets_by_state(tweets)
state_sentiments = calculate_average_sentiments(tweets_by_state)
draw_state_sentiments(state_sentiments)
for tweet in tweets:
draw_dot(tweet_location(tweet), analyze_tweet_sentiment(tweet))
wait()
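
# A quick way to exercise the doctests above (a sketch, not part of the project
# workflow; it assumes the course-provided data, geo, maps, and ucb modules and
# their data files are importable from the current directory):
#
#   python3 -m doctest -v trends.py
#
# The ucb @main decorator normally fires only when the file is run directly,
# so importing the module for doctests should not launch the map.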