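"""
SyncSolr.py

Pulls event records from a source Solr core as a CSV export (its /select
handler with wt=csv), reshapes each row into the iAnn Solr document schema,
wipes the target iAnn core, and pushes the transformed documents into it.

Written for Python 2 (urllib2, csv.reader.next, dict.iteritems).
"""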
__author__ = 'chuqiao'

import csv
import urllib2
import pysolr
import logging
import sys

# Python 2 workaround so non-ASCII text in the CSV does not raise
# UnicodeDecodeError when it is implicitly encoded.
reload(sys)
sys.setdefaultencoding('utf8')

logging.basicConfig(filename='syncsolr.log',
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    datefmt='%d/%m/%Y %I:%M:%S', filemode='w', level=logging.DEBUG)
# def logger():
# """
# Function that initialises logging system
# """
# global logger
# # create logger with 'syncsolr'
# logger = logging.getLogger('syncsolr')
# logger.setLevel(logging.DEBUG)
#
# # specifies the lowest severity that will be dispatched to the appropriate destination
#
# # create file handler which logs even debug messages
# fh = logging.FileHandler('syncsolr.log')
# # fh.setLevel(logging.WARN)
#
# # create console handler and set level to debug
# ch = logging.StreamHandler()
# # StreamHandler instances send messages to streams
# # ch.setLevel(logging.DEBUG)
#
# # create formatter and add it to the handlers
# formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# fh.setFormatter(formatter)
# ch.setFormatter(formatter)
# # add the handlers to the logger
# logger.addHandler(ch)
# logger.addHandler(fh)

def init(csvUrl, iannSolrUrl):
    # logger()
    logging.info('***Starting synchronizing***')
    syncSolr(csvUrl, iannSolrUrl)

def syncSolr(csvUrl, iannSolrUrl):
    try:
        logging.info("Pushing data from the CSV export URL to the iAnn Solr")
        # fetch and transform the CSV rows once
        docs = getDataFromCsv(csvUrl)
        deleteDataInSolr(iannSolrUrl)
        pushToIannSolr(docs, iannSolrUrl)
        logging.info('***Finished synchronizing***')
    except Exception:
        logging.error('***Synchronize failed*** \n%s' % str(sys.exc_info()))

def getDataFromCsv(csvUrl):
    """
    Fetch the CSV export from the given URL and turn each row into an
    iAnn Solr document (a dict keyed by the iAnn field names).
    """
    response = urllib2.urlopen(csvUrl)
    csvReader = csv.reader(response)
    # skip the header row of the export
    csvReader.next()
    # iAnn field names, in the same order as the exported CSV columns
    header = ['id', 'title', 'subtitle', 'start', 'end', 'provider', 'description',
              'category', 'keyword', 'field', 'venue', 'city', 'country', 'postcode',
              'latitude', 'longitude', 'link']
    data = []
    for row in csvReader:
        drow = dict(zip(header, row))
        # map the source event types onto the two iAnn categories
        if drow['category'] == 'Receptions and networking':
            drow['category'] = 'meeting'
        elif drow['category'] == 'Meetings and conferences':
            drow['category'] = 'meeting'
        elif drow['category'] == 'Awards and prizegivings':
            drow['category'] = 'meeting'
        else:
            drow['category'] = 'course'
        # fall back to the start date when no end date is given
        if drow['end'] == '':
            drow['end'] = drow['start']
        # unescape "\," in the keyword string, drop spaces and split into a list
        keywordValue = drow['keyword']
        listKeywordValue = keywordValue.replace('\\,', ',')
        myKeywordlist = listKeywordValue.replace(' ', '').split(',')
        drow['keyword'] = myKeywordlist
        # same treatment for the field (topic) column
        fieldValue = drow['field']
        listFieldValue = fieldValue.replace('\\,', ',')
        myFieldlist = listFieldValue.replace(' ', '').split(',')
        drow['field'] = myFieldlist
        # unescape "\," in the venue string
        venueValue = drow['venue']
        drow['venue'] = venueValue.replace('\\,', ',')
        # every record is also tagged with the generic "event" category
        drow['category'] = [drow['category'], 'event']
        # drop keys whose values are empty
        drowRemoveEmptyValue = dict((k, v) for k, v in drow.iteritems() if v)
        data.append(drowRemoveEmptyValue)
    return data
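
# The fl= parameter of the source export (see the call at the bottom of this
# file) lists the source columns in the same order as `header` above, so the
# positional zip maps eventId->id, name->title, alternateName->subtitle,
# startDate->start, endDate->end, hostInstitution->provider,
# eventType->category, keywords->keyword, topic->field, locationName->venue,
# locationCity->city, locationCountry->country, locationPostcode->postcode,
# url->link, and passes description, latitude and longitude through unchanged.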

def deleteDataInSolr(iannSolrUrl):
    """
    Delete every document in the iAnn Solr core.
    """
    # solrUrl = 'http://localhost:8982/solr/iann'
    solr = pysolr.Solr(iannSolrUrl, timeout=10)
    solr.delete(q='*:*')

def pushToIannSolr(docs, iannSolrUrl):
    """
    Add the transformed documents to the iAnn Solr core.
    """
    solr = pysolr.Solr(iannSolrUrl, timeout=10)
    solr.add(docs)
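
# Note: with the pysolr releases this script was written against, delete() and
# add() default to commit=True, so the deletion above and the add here are
# committed implicitly; if an installed pysolr version changed that default,
# an explicit solr.commit() would be needed (assumption about the installed
# pysolr version, not something this script checks).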
# if len(sys.argv) == 3:
# args = sys.argv
# init(args[1],args[2])
# else:
# init(
# "http://139.162.217.53:8983/solr/eventsportal/select?q=*:*&fl=eventId,name,alternateName,startDate,endDate,hostInstitution,description,eventType,keywords,topic,locationName,locationCity,locationCountry,locationPostcode,latitude,longitude,url,&rows=2147483647&wt=csv",
# "http://iann.pro/solrdev/iann"
# )

if __name__ == '__main__':
    init(
        "http://139.162.217.53:8983/solr/eventsportal/select?q=*:*&fl=eventId,name,alternateName,startDate,endDate,hostInstitution,description,eventType,keywords,topic,locationName,locationCity,locationCountry,locationPostcode,latitude,longitude,url,&rows=2147483647&wt=csv",
        "http://iann.pro/solr/iann"
    )