MIUI主题商店爬虫.py
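"""MIUI 主题商店爬虫 (MIUI theme store crawler).

Walks the listing pages of http://zhuti.xiaomi.com, scrapes title, size and
other metadata from each theme's detail page, and downloads the theme's .mtz
package into a local "下载" (download) folder. Network and console helpers
come from the project's own "lib" package (cmd, html, paths, zips).
"""
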
from lib import cmd
from lib import html
from lib import paths
from lib import zips  # only needed if the unzip step in getTheme() is re-enabled


def isnext(soup):
    """Return True if the listing page still has a "下一页" (next page) link."""
    return len(soup.find_all("a", string="下一页")) > 0


def getTheme(themeid):
    """Scrape one theme's detail page and download its .mtz package."""
    page = "http://m.zhuti.xiaomi.com" + themeid
    soup = html.getsoup(page)
    # The fields below are cut out of the raw tag HTML by string slicing,
    # so they are tied to the exact markup of the mobile theme pages.
    s = str(soup.find_all("div", class_="theme-tit")[0])
    title = s[s.rfind("tit\">") + 1 + 11:s.rfind("<s")]
    size = s[s.rfind("<span>") + 6:s.rfind("</") - 8]
    s = str(soup.find_all("div", class_="theme-infos")[0])
    designer = s[s.rfind("设计师") + 4:s.rfind("<br") - 7]
    maker = s[s.rfind("制作者") + 4:s.rfind("</") - 5]
    s = str(soup.find_all("div", class_="theme-introduce")[0])
    # description = s[29:len(s) - 7]
    s = str(soup.find_all("div", class_="version-info")[0])
    # ":" is not allowed in Windows file names, so the update time is stored
    # with "-" instead before it is used in the download file name.
    update = s[s.rfind("info") + 11:len(s) - 6].replace(":", "-")
    s = str(soup.find_all("a", class_="btn-comments")[0])
    comment = s[s.rfind("<span>") + 6:s.rfind("</span>")] + "条"
    downloadlink = "http://zhuti.xiaomi.com" + themeid.replace("detail", "download")
    s = str(soup.find_all("div", class_="star-rank")[0])
    score = s[s.rfind("rank") + 6:s.rfind("><") - 1] + "分(满分10分)"
    # price = ""
    print(" 名称: " + title)
    print(" 大小: " + size)
    # print(" 设计师: " + designer)
    # print(" 制作者: " + maker)
    # print(" 介绍: " + description)
    # print("更新时间: " + update)
    # print(" 评论: " + comment)
    # print("下载地址: " + downloadlink)
    # print(" 评分: " + score)
    # print(" 售价: " + price)
    # Save the package as "<title> <update>.mtz" under the "下载" (download)
    # folder next to the script.
    if html.download(downloadlink, paths.startpath() + "/下载/" + title + " " + update + ".mtz"):
        # zips.unzip(paths.startpath() + "/下载/" + title + " " + update + ".mtz", paths.startpath() + "/下载测试/" + title + " " + update)
        pass
    # print("")
    # cmd.pause("执行完成,按任意键继续...")


def getThemes(soup):
    """Handle every theme listed on one page of the store's thumbnail list."""
    soup1 = html.soup(str(soup.find_all("ul", class_="thumb-list")))
    soup2 = soup1.find_all("a")
    # Only every other <a> in the list points at a theme detail page,
    # so the istheme flag skips the rest.
    istheme = True
    for theme in soup2:
        if istheme:
            getTheme(theme["href"])
            istheme = False
        else:
            istheme = True


def main():
    cmd.title("MIUI主题商店爬虫")  # console window title: "MIUI theme store crawler"
    cmd.size(70, 36)  # console window size
    try:
        page = 100  # first listing page to crawl
        while True:
            soup = html.getsoup('http://zhuti.xiaomi.com/compound?page=' + str(page) + '&sort=New')
            if isnext(soup):
                getThemes(soup)
                page += 1
            else:
                # No "下一页" (next page) link, so the listing is exhausted.
                break
    except Exception as e:
        print(e)
    cmd.pause("执行完成,按任意键继续...")  # "Done, press any key to continue..."


if __name__ == '__main__':
    main()
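

# ---------------------------------------------------------------------------
# Note: the "lib" package used above is project-local and not shown here.
# As a rough sketch (an assumption, not the original implementation), its
# html helpers could be built on requests + BeautifulSoup along these lines:
#
#     import os
#     import requests
#     from bs4 import BeautifulSoup
#
#     def getsoup(url):
#         resp = requests.get(url, timeout=30)
#         resp.encoding = "utf-8"
#         return BeautifulSoup(resp.text, "html.parser")
#
#     def soup(markup):
#         return BeautifulSoup(markup, "html.parser")
#
#     def download(url, path):
#         resp = requests.get(url, stream=True, timeout=60)
#         if resp.status_code != 200:
#             return False
#         os.makedirs(os.path.dirname(path), exist_ok=True)
#         with open(path, "wb") as f:
#             for chunk in resp.iter_content(chunk_size=8192):
#                 f.write(chunk)
#         return True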