@@ -188,7 +188,9 @@ def load_catalog(filename, type='csep-csv', format='native', loader=None, apply_
188
188
189
189
def query_comcat (start_time , end_time , min_magnitude = 2.50 ,
190
190
min_latitude = 31.50 , max_latitude = 43.00 ,
191
- min_longitude = - 125.40 , max_longitude = - 113.10 , verbose = True ,
191
+ min_longitude = - 125.40 , max_longitude = - 113.10 ,
192
+ max_depth = 1000 ,
193
+ verbose = True ,
192
194
apply_filters = False , ** kwargs ):
193
195
"""
194
196
Access Comcat catalog through web service
@@ -201,19 +203,20 @@ def query_comcat(start_time, end_time, min_magnitude=2.50,
201
203
max_latitude: max latitude of bounding box
202
204
min_longitude: min longitude of bounding box
203
205
max_longitude: max longitude of bounding box
204
- region: :class:`csep.core.regions.CartesianGrid2D
206
+ max_depth: maximum depth of the bounding box
205
207
verbose (bool): print catalog summary statistics
206
208
207
209
Returns:
208
- :class:`csep.core.catalogs.ComcatCatalog
210
+ :class:`csep.core.catalogs.CSEPCatalog`
209
211
"""
210
212
211
213
# Timezone should be in UTC
212
214
t0 = time .time ()
213
215
eventlist = readers ._query_comcat (start_time = start_time , end_time = end_time ,
214
216
min_magnitude = min_magnitude ,
215
217
min_latitude = min_latitude , max_latitude = max_latitude ,
216
- min_longitude = min_longitude , max_longitude = max_longitude )
218
+ min_longitude = min_longitude , max_longitude = max_longitude ,
219
+ max_depth = max_depth )
217
220
t1 = time .time ()
218
221
comcat = catalogs .CSEPCatalog (data = eventlist , date_accessed = utc_now_datetime (), ** kwargs )
219
222
print ("Fetched ComCat catalog in {} seconds.\n " .format (t1 - t0 ))
@@ -234,6 +237,59 @@ def query_comcat(start_time, end_time, min_magnitude=2.50,
234
237
235
238
return comcat
236
239
240
+
241
def query_bsi(start_time, end_time, min_magnitude=2.50,
              min_latitude=32.0, max_latitude=50.0,
              min_longitude=2.0, max_longitude=21.0,
              max_depth=1000,
              verbose=True,
              apply_filters=False, **kwargs):
    """
    Access the Bollettino Sismico Italiano (BSI) catalog through its web service
    and return the events as a :class:`csep.core.catalogs.CSEPCatalog`.

    Args:
        start_time: datetime object of start of catalog
        end_time: datetime object for end of catalog
        min_magnitude: minimum magnitude to query
        min_latitude: min latitude of bounding box
        max_latitude: max latitude of bounding box
        min_longitude: min longitude of bounding box
        max_longitude: max longitude of bounding box
        max_depth: maximum depth of the bounding box
        verbose (bool): print catalog summary statistics
        apply_filters (bool): if True, apply the catalog's filter() (and, when
            possible, filter_spatial()) before returning
        **kwargs: forwarded to the :class:`csep.core.catalogs.CSEPCatalog`
            constructor

    Returns:
        :class:`csep.core.catalogs.CSEPCatalog`
    """

    # Timezone should be in UTC
    # t0/t1 bracket only the web-service fetch, for the timing printout below.
    t0 = time.time()
    eventlist = readers._query_bsi(start_time=start_time, end_time=end_time,
                                   min_magnitude=min_magnitude,
                                   min_latitude=min_latitude, max_latitude=max_latitude,
                                   min_longitude=min_longitude, max_longitude=max_longitude,
                                   max_depth=max_depth)
    t1 = time.time()
    bsi = catalogs.CSEPCatalog(data=eventlist, date_accessed=utc_now_datetime(), **kwargs)
    print("Fetched BSI catalog in {} seconds.\n".format(t1 - t0))

    if apply_filters:
        try:
            bsi = bsi.filter().filter_spatial()
        except CSEPCatalogException:
            # NOTE(review): presumably filter_spatial() raises when no region is
            # attached to the catalog, so fall back to the non-spatial filters
            # only — confirm against CSEPCatalog.filter_spatial.
            bsi = bsi.filter()

    if verbose:
        print("Downloaded catalog from Bollettino Sismico Italiano (BSI) with following parameters")
        print("Start Date: {}\nEnd Date: {}".format(str(bsi.start_time), str(bsi.end_time)))
        print("Min Latitude: {} and Max Latitude: {}".format(bsi.min_latitude, bsi.max_latitude))
        print("Min Longitude: {} and Max Longitude: {}".format(bsi.min_longitude, bsi.max_longitude))
        print("Min Magnitude: {}".format(bsi.min_magnitude))
        print(f"Found {bsi.event_count} events in the BSI catalog.")

    return bsi
291
+
292
+
237
293
def load_evaluation_result (fname ):
238
294
""" Load evaluation result stored as json file
239
295
0 commit comments