dare2compete.py
import json
import time
import traceback

import requests
from bs4 import BeautifulSoup


def addBlanks(row):
    # Replace missing values with empty strings so every row carries the full schema.
    for key in row:
        if row[key] is None:
            row[key] = ''


def scrape():
    timestamp = time.time()
    # headers.json lists the output columns; every scraped row starts from these keys.
    with open('headers.json') as headers_file:
        headers = json.load(headers_file)
    json_filename = './files/dare2compete.json'
    url = 'https://api.dare2compete.com/api/opportunity/search?opportunity=internships&sort=latest&page=1'
    response = requests.get(url)
    jobs = response.json()['data']['data']
    joblist = []
    for job in jobs:
        row = dict.fromkeys(headers)
        row['title'] = job['title']
        row['applylink'] = 'https://dare2compete.com/' + str(job['public_url'])
        # Keep only the first non-empty text fragment of the HTML job description.
        row['jd'] = next(BeautifulSoup(job['details'], "lxml").stripped_strings, '')
        row['location'] = job['location']
        row['type'] = job['type']
        row['startdate'] = job['start_date']
        row['enddate'] = job['end_date']
        row['created'] = job['display_date']
        row['source'] = 'dare2compete'
        row['timestamp'] = timestamp
        addBlanks(row)
        print(row)
        joblist.append(row)
    # Write the results only after the scrape succeeds, so a failed request
    # does not truncate an existing output file.
    with open(json_filename, 'w') as fp:
        json.dump(joblist, fp, indent=1)


try:
    scrape()
except Exception as ex:
    with open("error.log", 'a') as errorlog:
        # print(time.asctime() + ": " + str(ex), file=errorlog)
        traceback.print_exc(file=errorlog)
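
The script expects a headers.json file next to it, which scrape() loads and passes to dict.fromkeys to seed each output row. Below is a minimal sketch of generating such a file; the key list is an assumption inferred from the fields assigned in scrape(), and the real headers.json may use a different order or include extra columns.

# Hypothetical helper: writes an assumed headers.json for local testing.
import json

assumed_headers = ['title', 'applylink', 'jd', 'location', 'type',
                   'startdate', 'enddate', 'created', 'source', 'timestamp']
with open('headers.json', 'w') as f:
    json.dump(assumed_headers, f, indent=1)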