internshala.py
import json
import time
import traceback
from urllib.request import urlopen as uReq

from bs4 import BeautifulSoup as soup
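# bs4 (BeautifulSoup) is the only third-party dependency:
#     pip install beautifulsoup4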
def addBlanks(row):
    # Replace unset (None) fields with empty strings so the JSON output
    # never contains nulls.
    for key in row:
        if row[key] is None:
            row[key] = ''

def scrape():
    timestamp = time.time()
    # headers.json lists the output field names; each row starts as a dict
    # with every field set to None.
    with open('headers.json') as headers_file:
        headers = json.load(headers_file)
    filename = "./files/internshala.json"
    joblist = []
    # Walk the first three result pages of the Bangalore internship search.
    for i in range(0, 3):
        my_url = 'https://internshala.com/internships/internship-in-bangalore/page-{}'.format(i)
        uClient = uReq(my_url)
        page_html = uClient.read()
        uClient.close()
        page_soup = soup(page_html, "html.parser")
        # Each listing sits in its own container div; the trailing space in
        # the class string is preserved deliberately from the original selector.
        containers = page_soup.find_all("div", {"class": "container-fluid individual_internship "})
        for container in containers:
            row = dict.fromkeys(headers)
            row['title'] = container.a.text
            row['applylink'] = 'https://internshala.com' + container.a["href"]
            company_container = container.find_all("a", {"class": "link_display_like_text"})
            row['companyname'] = company_container[0].text
            location_container = container.find_all("a", {"class": "location_link"})
            row['location'] = location_container[0].text
            stipend_container = container.find_all("td", {"class": "stipend_container_table_cell"})
            row['salary'] = stipend_container[0].text
            type_container = container.find_all("div", {"class": "button_container"})
            row['type'] = " ".join(type_container[0].div.text.split())
            start_container = container.find_all("div", {"id": "start-date-first"})
            row['startdate'] = start_container[0].text
            # The details table holds both the posted-on date (cell 3) and
            # the apply-by date (cell 4).
            details_container = container.find_all("div", {"class": "table-responsive"})
            details = details_container[0].find_all("td")
            row['enddate'] = details[4].text
            row['created'] = details[3].text
            row['source'] = 'internshala'
            row['experience'] = 'Fresher'
            # Every result comes from the Bangalore search, so the scraped
            # location is normalized to a single city name.
            row['location'] = 'Bengaluru'
            row['timestamp'] = timestamp
            addBlanks(row)
            print(row)
            joblist.append(row)
    # Dump everything in one pass once all pages are scraped.
    with open(filename, "w") as fp:
        json.dump(joblist, fp, indent=1)
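# Note: open(filename, "w") will not create missing directories, so the
# ./files directory must exist before this script runs.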
# Run the scraper; on failure, append a timestamped traceback to error.log.
try:
    scrape()
except Exception as ex:
    with open("error.log", 'a') as errorlog:
        errorlog.write(time.asctime() + ": " + str(ex) + "\n")
        traceback.print_exc(file=errorlog)
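# Note: scrape() assumes a headers.json file alongside this script. Its exact
# contents are not included in this snippet; a minimal sketch, assuming it is
# simply a JSON array of the field names scrape() fills in, would be:
#
#     ["title", "applylink", "companyname", "location", "salary", "type",
#      "startdate", "enddate", "created", "source", "experience", "timestamp"]
#
# dict.fromkeys(headers) seeds each row with these keys set to None, and
# addBlanks() turns any field the scrape leaves unset into ''.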