-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathproject_using_class.py
119 lines (91 loc) · 4.74 KB
/
project_using_class.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
from flask import Flask

# NOTE: the original used `from flask.ext.classy import FlaskView`.
# The `flask.ext` proxy namespace was deprecated in Flask 0.11 and removed
# in Flask 1.0 -- extensions must be imported by their package name.
from flask_classy import FlaskView

import MySQLdb
import requests
from bs4 import BeautifulSoup

app = Flask(__name__)
# Registry of scrapable RSS sources.  Each entry maps a site's hostname
# ('id') to its feed location ('url').  The NDTV entry stores an absolute
# URL because that site redirects its feed to feedburner.com, while the
# other two store a path relative to their own host.
news_sites = [
    {'id': 'timesofindia.indiatimes.com', 'url': '/rssfeeds/1221656.cms'},
    {'id': 'zeenews.india.com', 'url': '/rss/world-news.xml'},
    {'id': 'www.ndtv.com', 'url': 'http://feeds.feedburner.com/ndtvnews-latest'},
]
class ApiView(FlaskView):
    """REST-style view exposing the RSS scraper.

    Routes (derived by flask-classy from method names):
      GET /api/          -> index(): placeholder.
      GET /api/<news>/   -> get(news): fetch the feed of the site whose
                            'id' equals `news`, replace the contents of the
                            `news` DB table with its first three items, and
                            return "done.." / "not done..".
    """

    def index(self):
        # Placeholder root endpoint.
        return 'this is blank'

    def _store_items(self, items):
        """Replace the `news` table contents with the first three feed items.

        Opens one connection and commits once (the original reconnected for
        every row).  Uses a parameterized INSERT: the original concatenated
        scraped feed text straight into the SQL string, which is an SQL
        injection vector and breaks on any quote character in a headline.
        """
        conn = MySQLdb.connect(host="localhost", user="root", passwd="", db="news")
        try:
            cursor = conn.cursor()
            try:
                # Wipe old entries so only the fresh batch remains.
                cursor.execute('delete from news')
                # Slice guards against feeds with fewer than 3 items
                # (the original indexed items[0..2] unconditionally).
                for item in items[:3]:
                    title = item.title.text.encode('utf8')
                    date = item.pubdate.text.encode('utf8')
                    description = item.description.text.encode('utf8')
                    print(title)
                    print(date)
                    print(description)
                    # %s placeholders: the driver quotes/escapes the values.
                    cursor.execute(
                        'INSERT INTO news(title, date, story) VALUES (%s, %s, %s)',
                        (title, date, description),
                    )
            finally:
                cursor.close()
            conn.commit()
        finally:
            conn.close()

    def get(self, news):
        """Scrape the RSS feed for the registered site whose id is `news`.

        Returns "done.." on success, "not done.." when the HTTP fetch fails,
        and an explicit message for unknown ids (the original fell off the
        end and returned None, which Flask rejects as a response).
        """
        for site in news_sites:
            if news != site['id']:
                continue
            # NDTV's registry entry is already an absolute URL (the site
            # redirects its feed to feedburner); the others are paths
            # relative to the requested host.  This replaces the original's
            # duplicated special-case branch for news_sites[2].
            if site['url'].startswith('http'):
                url = site['url']
            else:
                url = 'http://' + news + site['url']
            html = requests.get(url)
            if html:
                soup = BeautifulSoup(html.content, 'html.parser')
                # html.parser lowercases tag names, so <pubDate> is found
                # as item.pubdate inside _store_items.
                self._store_items(soup.find_all('item'))
                return "done.."
            return "not done.."
        return "unknown news source"
# Register ApiView's routes on the app; flask-classy derives the URL rules
# from the class name and its method names (e.g. get() -> /api/<news>/).
ApiView.register(app)

# Start the Flask development server only when run as a script,
# not when imported as a module.
if __name__=='__main__':
    app.run()