-
Notifications
You must be signed in to change notification settings - Fork 0
/
db.py
172 lines (134 loc) · 5.34 KB
/
db.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
import mysql.connector as m
import json
import feedparser
import uuid
# Load DB credentials from a local JSON file.
# Expected keys: "username", "password", "db-host".
with open("./credentials.json", "r") as f:
    credentials = json.load(f)
username = credentials["username"]
password = credentials["password"]
host = credentials["db-host"]
# Refuse to start with an unconfigured credentials file.
if username == "":
    print("------------------------------------------------------")
    print("\n\033[31mSET YOUR USERNAME AND PASSWORD IN ./credentials.json\033[0m\n")
    print("------------------------------------------------------")
    exit()
# Module-level connection and cursor shared by every helper below.
# NOTE(review): connecting at import time means importing this module
# requires a reachable MySQL server.
db = m.connect(host=host, user=username, passwd=password)
cursor = db.cursor()
try:
    # Bootstrap the schema on first run; idempotent via IF NOT EXISTS.
    cursor.execute("CREATE DATABASE IF NOT EXISTS reader")
    cursor.execute(
        "CREATE TABLE IF NOT EXISTS reader.feedUrls (urls VARCHAR(900), feedName VARCHAR(255))"
    )
    # UNIQUE(title) backs the duplicate check performed in feed_fetch().
    cursor.execute(
        """CREATE TABLE IF NOT EXISTS reader.articles (name VARCHAR(255) NOT NULL,
        uniq_id VARCHAR(32) NOT NULL,
        title VARCHAR(255) NOT NULL,
        published VARCHAR(255) NOT NULL,
        orginal_link VARCHAR(255) NOT NULL,
        content MEDIUMTEXT CHARACTER SET utf8mb4,
        viewed ENUM('y', 'n'), UNIQUE(title))"""
    )
    cursor.execute("USE reader")
except m.Error as e:
    # Schema bootstrap failed (bad grants, server down, ...): report and stop.
    print("---------------------------------------------")
    print(f"\033[31m Error Code : {e.errno}\033[0m")
    print(f"\033[31m Error Message : {e.msg}\033[0m")
    print("---------------------------------------------")
    exit()
def fetch_urls_from_db():
    """Return every stored feed as a list of (url, feedName) tuples."""
    cursor.execute("SELECT * FROM feedUrls")
    return cursor.fetchall()
def validate_rss(url):
    """Return True when *url* parses as a recognizable RSS/Atom feed.

    feedparser only reports a non-empty ``version`` string when it could
    identify the feed format.  On a non-feed document the key can be
    missing entirely, which the original attribute access let raise
    AttributeError; ``.get`` with a default makes the check total, and
    the function now always returns a bool (the old if/elif could in
    principle fall through to None).
    """
    return bool(feedparser.parse(url).get("version", ""))
def add_feed_urls_to_db(url, name):
    """Persist one (url, feedName) pair into reader.feedUrls."""
    insert_command = "INSERT INTO feedUrls VALUES (%s, %s)"
    separator = "---------------------------------------------"
    print(separator)
    print(f"INSERTION CMD : {insert_command}, {url}")
    print(separator)
    # Parameterized query: the driver handles quoting/escaping.
    cursor.execute(insert_command, (url, name))
    db.commit()
def delete_from_db(name):
    """Delete a feed and all of its stored articles.

    Parameters:
        name: feed display name (matches feedUrls.feedName and articles.name).
    Returns:
        True on success, False when the database reports an error.
    """
    delete_feed_urls_command = "DELETE FROM feedUrls WHERE feedName=%s"
    delete_articles_command = "DELETE FROM articles WHERE name=%s"
    print("---------------------------------------------")
    print(f"DELETION CMD : {delete_feed_urls_command % name}")
    print("---------------------------------------------")
    try:
        cursor.execute(delete_feed_urls_command, (name,))
        cursor.execute(delete_articles_command, (name,))
        db.commit()
        print("---------------------------------------------")
        print("Data deleted successfully")
        print("---------------------------------------------")
        return True
    except m.Error:
        # The original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only MySQL errors and undo any
        # partially-applied delete before reporting failure.
        db.rollback()
        print("---------------------------------------------")
        print("Failed to delete data")
        print("---------------------------------------------")
        return False
async def feed_fetch():
    """Fetch every stored feed and insert unseen entries into reader.articles.

    For each (url, feedName) row the feed is parsed and every entry whose
    title is not already stored (articles has UNIQUE(title)) is inserted
    with a fresh 32-char hex id and viewed='n'.
    """
    url_data = fetch_urls_from_db()
    print("-------------------------")
    print(f"Full Url Data: ")
    print(f"{url_data}")
    print("-------------------------")
    for url in url_data:
        print("-------------------------")
        print(f"Parsing : {url}")
        print("-------------------------")
        parser = feedparser.parse(url[0])
        name = url[1]
        # Iterate the entries directly instead of indexing by range(len(...)).
        for entry in parser["entries"]:
            title = entry["title"]
            cursor.execute("SELECT * FROM articles WHERE title=%s", (title,))
            if cursor.fetchone() is not None:
                # Already stored — skip before doing any per-entry work.
                continue
            uniq_id = uuid.uuid4().hex
            # Prefer the published timestamp; Atom feeds may only carry
            # "updated_parsed". Both are time.struct_time-like sequences.
            if "published_parsed" in entry:
                published_struct = entry["published_parsed"]
            else:
                published_struct = entry["updated_parsed"]
            published = (
                f"{published_struct[0]}/{published_struct[1]}/{published_struct[2]}"
                f" {published_struct[3]}:{published_struct[4]}"
            )
            orginal_link = entry["links"][0]["href"]
            # Full content when available, otherwise the summary text.
            if "content" in entry:
                content = entry["content"][0]["value"]
            else:
                content = entry["summary"]
            viewed = "n"
            insert_command = "INSERT INTO articles VALUES (%s, %s, %s, %s, %s, %s, %s)"
            cursor.execute(
                insert_command,
                (name, uniq_id, title, published, orginal_link, content, viewed),
            )
            db.commit()
def fetch_data_from_db():
    """Return every stored article as a dict, newest publish date first.

    Note: the ORDER BY parses only the date portion of the stored
    "Y/M/D H:M" string, so ordering is by day, not time of day.
    """
    cursor.execute(
        "SELECT * FROM articles ORDER BY STR_TO_DATE(published, '%Y/%m/%d') DESC"
    )
    columns = (
        "name",
        "uniq_id",
        "title",
        "published",
        "orginal_link",
        "content",
        "viewed",
    )
    return [dict(zip(columns, row)) for row in cursor.fetchall()]
def mark_article_read(uniq_id):
    """Flag the article identified by *uniq_id* as viewed ('y')."""
    cursor.execute("UPDATE articles SET viewed = 'y' WHERE uniq_id=%s", (uniq_id,))
    db.commit()