
Commit cfc2efe
edits to filters
silburt committed Jan 18, 2018
1 parent dc865d7 commit cfc2efe
Showing 5 changed files with 71 additions and 58 deletions.
29 changes: 14 additions & 15 deletions get_unique_craters.py
@@ -50,7 +50,7 @@ def get_model_preds(CP):
return preds

#########################
def add_unique_craters(craters, craters_unique, thresh_longlat2, thresh_rad2):
def add_unique_craters(craters, craters_unique, thresh_longlat2, thresh_rad):
"""Generates unique crater distribution by filtering out duplicates.
Parameters
@@ -62,7 +62,7 @@ def add_unique_craters(craters, craters_unique, thresh_longlat2, thresh_rad2):
thresh_longlat2 : float.
Hyperparameter that controls the minimum squared longitude/latitude
difference between craters to be considered unique entries.
thresh_rad2 : float
thresh_rad : float
Hyperparameter that controls the minimum squared radius difference
between craters to be considered unique entries.
@@ -71,20 +71,20 @@ def add_unique_craters(craters, craters_unique, thresh_longlat2, thresh_rad2):
craters_unique : array
Modified master array of unique crater tuples with new crater entries.
"""
km_to_deg = 180. / (np.pi * 1737.4)
k2d = 180. / (np.pi * 1737.4) # km to deg
Long, Lat, Rad = craters_unique.T
for j in range(len(craters)):
lo, la, r = craters[j].T
# Fractional long/lat change
diff_longlat = ((Long - lo)**2 + (Lat - la)**2) / (r * km_to_deg)**2
Rad_ = Rad[diff_longlat < thresh_longlat2]
if len(Rad_) > 0:
# Fractional radius change
diff_rad = ((Rad_ - r) / r)**2
index = diff_rad < thresh_rad2
if len(np.where(index == True)[0]) == 0:
craters_unique = np.vstack((craters_unique, craters[j]))
else:
la_m = (la + Lat) / 2.
minr = np.minimum(r, Rad)

# duplicate filtering criteria
dL = (((Long - lo)/(minr * k2d / np.cos(np.pi * la_m / 180.)))**2
+ ((Lat - la)/(minr * k2d))**2)
dR = np.abs(Rad - r) / minr
index = (dR < thresh_rad) & (dL < thresh_longlat2)

if len(np.where(index == True)[0]) == 0:
craters_unique = np.vstack((craters_unique, craters[j]))
return craters_unique
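The core of this commit: the old duplicate test (squared long/lat offset normalized by the crater's own radius in degrees, plus a squared fractional radius cut) is replaced by a criterion normalized by the smaller radius of each pair, with the longitude term corrected by cos(latitude). Below is a minimal standalone sketch of the new test; the helper name, the sample coordinates, and the default thresholds (taken from the tuned values in utils/template_match_target.py) are illustrative rather than part of the repository.

import numpy as np

def is_duplicate(lo, la, r, Long, Lat, Rad, thresh_longlat2=1.8, thresh_rad=1.0):
    """Return True if crater (lo, la, r) duplicates any entry of (Long, Lat, Rad).

    Mirrors the new criterion in add_unique_craters: offsets are normalized by
    the smaller radius of each pair (converted to degrees) and the longitude
    term is corrected by cos(mean latitude).
    """
    k2d = 180. / (np.pi * 1737.4)      # km-to-degree conversion at the lunar radius
    la_m = (la + Lat) / 2.             # mean latitude of each candidate pair
    minr = np.minimum(r, Rad)          # normalize by the smaller crater of the pair
    dL = (((Long - lo) / (minr * k2d / np.cos(np.pi * la_m / 180.)))**2
          + ((Lat - la) / (minr * k2d))**2)
    dR = np.abs(Rad - r) / minr
    return bool(np.any((dR < thresh_rad) & (dL < thresh_longlat2)))

# Illustrative call: one catalogued crater and a nearby, similar-sized candidate.
Long, Lat, Rad = np.array([10.0]), np.array([-20.0]), np.array([5.0])
print(is_duplicate(10.01, -20.01, 5.1, Long, Lat, Rad))   # True -> filtered as a duplicate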

@@ -182,7 +182,6 @@ def extract_unique_craters(CP, craters_unique):
for i in range(CP['n_imgs']):
id = proc.get_id(i)

# detect craters from CNN-predicted image
coords = tmt.template_match_t(preds[i])

# convert, add to master dist
@@ -196,7 +195,7 @@ def extract_unique_craters(CP, craters_unique):
if len(craters_unique) > 0:
craters_unique = add_unique_craters(new_craters_unique,
craters_unique,
CP['llt2'], CP['rt2'])
CP['llt2'], CP['rt'])
else:
craters_unique = np.concatenate((craters_unique,
new_craters_unique))
6 changes: 3 additions & 3 deletions model_train.py
@@ -204,13 +204,13 @@ def get_metrics(data, craters, dim, model, beta=1):
print("""mean and std of (N_detect - N_match)/N_csv (fraction of
"craters that are new, 2) = %f, %f""" %
(np.mean(frac_new2), np.std(frac_new2)))
print("median and IQR fractional longitude diff = %f, 0.25:%f, 0.75:%f" %
print("median and IQR fractional longitude diff = %f, 25:%f, 75:%f" %
(np.median(err_lo), np.percentile(err_lo, 25),
np.percentile(err_lo, 75)))
print("median and IQR fractional latitude diff = %f, 0.25:%f, 0.75:%f" %
print("median and IQR fractional latitude diff = %f, 25:%f, 75:%f" %
(np.median(err_la), np.percentile(err_la, 25),
np.percentile(err_la, 75)))
print("median and IQR fractional radius diff = %f, 0.25:%f, 0.75:%f" %
print("median and IQR fractional radius diff = %f, 25:%f, 75:%f" %
(np.median(err_r), np.percentile(err_r, 25),
np.percentile(err_r, 75)))
print("""mean and std of maximum detected pixel radius in an image =
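The only functional point in these print changes is the percentile scale: np.percentile takes percentages on a 0-100 scale, so the labels now read 25/75 rather than 0.25/0.75. A tiny sketch with made-up error values (in get_metrics, err_r holds per-crater fractional radius differences):

import numpy as np

err_r = np.array([0.02, 0.05, 0.08, 0.03, 0.11])   # illustrative values only
print("median and IQR fractional radius diff = %f, 25:%f, 75:%f" %
      (np.median(err_r), np.percentile(err_r, 25), np.percentile(err_r, 75)))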
11 changes: 5 additions & 6 deletions run_get_unique_craters.py
@@ -25,18 +25,17 @@
CP['rt2'] = float(sys.argv[2]) # D_{R} from Silburt et al. (2017)

# Location of model to generate predictions (if they don't exist yet)
CP['dir_model'] = '../moon-craters/models/HEAD_final.h5'
CP['dir_model'] = 'models/model.h5'

# Location of where hdf5 data images are stored
CP['dir_data'] = '../moon-craters/datasets/HEAD/%s_images_final.hdf5' % CP['datatype']
CP['dir_data'] = 'catalogues/%s_images.hdf5' % CP['datatype']

# Location of where model predictions are/will be stored
CP['dir_preds'] = '../moon-craters/datasets/HEAD/HEAD_%spreds_n%d_final.hdf5' % (CP['datatype'],
CP['n_imgs'])
CP['dir_preds'] = 'catalogues/%s_preds_n%d.hdf5' % (CP['datatype'],
CP['n_imgs'])

# Location of where final unique crater distribution will be stored
CP['dir_result'] = 'datasets/HEAD/HEAD_%s_craterdist_llt%.2f_rt%.2f_' \
'final.npy' % (CP['datatype'], CP['llt2'], CP['rt2'])
CP['dir_result'] = 'catalogues/%s_craterdist.npy' % (CP['datatype'])

if __name__ == '__main__':
craters_unique = np.empty([0, 3])
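For orientation, a condensed sketch of how the relocated paths and the two duplicate-filtering hyperparameters flow into extract_unique_craters. The key names follow this diff; the example values, the import form, and the assumption that these are the only keys the function needs are mine rather than the repository's.

import numpy as np
import get_unique_craters as guc

CP = {}
CP['datatype'] = 'test'
CP['n_imgs'] = 30000                                  # illustrative image count
CP['llt2'] = 1.8                                      # duplicate long/lat threshold (sys.argv[1])
CP['rt'] = 1.0                                        # duplicate radius threshold D_{R} (sys.argv[2])
CP['dir_model'] = 'models/model.h5'
CP['dir_data'] = 'catalogues/%s_images.hdf5' % CP['datatype']
CP['dir_preds'] = 'catalogues/%s_preds_n%d.hdf5' % (CP['datatype'], CP['n_imgs'])
CP['dir_result'] = 'catalogues/%s_craterdist.npy' % CP['datatype']

craters_unique = np.empty([0, 3])                     # master (long, lat, radius) array
craters_unique = guc.extract_unique_craters(CP, craters_unique)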
4 changes: 2 additions & 2 deletions run_model_train.py
@@ -24,12 +24,12 @@

# Number of train/valid/test samples, needs to be a multiple of batch size.
MP['n_train'] = 30000
MP['n_dev'] = 1000
MP['n_dev'] = 5000
MP['n_test'] = 5000

# Save model (binary flag) and directory.
MP['save_models'] = 1
MP['save_dir'] = 'models/DeepMoon.h5'
MP['save_dir'] = 'models/model.h5'

# Model Parameters (to potentially iterate over, keep in lists).
MP['N_runs'] = 1 # Number of runs
79 changes: 47 additions & 32 deletions utils/template_match_target.py
@@ -4,26 +4,37 @@

#####################################
"""
Crater Detection Hyperparameters
--------------------------------
Tuned Crater Detection Hyperparameters
--------------------------------------
minrad, maxrad : ints
radius range in match_template to search over.
longlat_thresh2, rad_thresh : floats
if (x1-x2)^2 + (y1-y2)^2 < longlat_thresh2 and abs(r1-r2) < max(1.0,rad_thresh*r1)
remove (x2,y2,r2) circle (it is a duplicate of another crater candidate). In
addition, when matching CNN-detected rings to corresponding csvs (i.e.
template_match_target_to_csv), the same criteria is used to determine a match.
if ((x1-x2)^2 + (y1-y2)^2)/min(r1,r2) < longlat_thresh2 and
abs(r1-r2) < max(min_rt, rad_thresh*min(r1,r2)) remove (x2,y2,r2) circle (it
is a duplicate of another crater candidate). In addition, when matching
CNN-detected rings to corresponding csvs (i.e. template_match_target_to_csv),
the same criterion is used to determine a match.
template_thresh : float
0-1 range. If match_template probability > template_thresh, count as detection.
target_thresh : float
target_thresh : float
0-1 range. target[target >= target_thresh] = 1, target[target < target_thresh] = 0
Hardcoded Crater Detection Hyperparameters
------------------------------------------
rw : int
thickness of rings for template match
min_rt : float
floor of the abs(r - Rad) tolerance in the rad_thresh criterion.
"""
minrad_ = 5
maxrad_ = 50
longlat_thresh2_ = 70
maxrad_ = 40
longlat_thresh2_ = 1.8
rad_thresh_ = 1.0
template_thresh_ = 0.5
target_thresh_ = 0.1
#------------------
rw = 2
min_rt = 1.01
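The docstring above spells out the retuned pixel-space duplicate test. Written out for a single pair of candidate circles, with defaults mirroring the new module constants (longlat_thresh2_ = 1.8, rad_thresh_ = 1.0, min_rt = 1.01); the helper name and sample coordinates are illustrative only.

def is_pixel_duplicate(x1, y1, r1, x2, y2, r2,
                       longlat_thresh2=1.8, rad_thresh=1.0, min_rt=1.01):
    """Duplicate test from the docstring above (defaults mirror the tuned constants)."""
    minr = min(r1, r2)                            # normalize by the smaller circle
    dL = ((x1 - x2)**2 + (y1 - y2)**2) / minr     # position offset, scaled by min radius
    dR = abs(r1 - r2)                             # radius difference, floored at min_rt pixels
    return (dR < max(min_rt, rad_thresh * minr)) and (dL < longlat_thresh2)

print(is_pixel_duplicate(100., 120., 12., 101., 121., 12.5))   # True  -> treated as one crater
print(is_pixel_duplicate(100., 120., 12., 110., 130., 12.5))   # False -> kept as separate craters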

#####################################
def template_match_t(target, minrad=minrad_, maxrad=maxrad_,
@@ -61,9 +72,6 @@ def template_match_t(target, minrad=minrad_, maxrad=maxrad_,
Pixel coordinates of successfully detected craters in predicted target.
"""

# thickness of rings for the templates.
rw = 2

# threshold target
target[target >= target_thresh] = 1
target[target < target_thresh] = 0
@@ -96,9 +104,12 @@ def template_match_t(target, minrad=minrad_, maxrad=maxrad_,
while i < N:
Long, Lat, Rad = coords.T
lo, la, r = coords[i]
diff_longlat = (Long - lo)**2 + (Lat - la)**2
diff_rad = abs(Rad - r)
index = (diff_rad < max(2.01, rad_thresh * r)) & (diff_longlat < longlat_thresh2)
minr = np.minimum(r, Rad)

dL = ((Long - lo)**2 + (Lat - la)**2) / minr
dR = abs(Rad - r)
index = ((dR < np.maximum(min_rt, rad_thresh * minr))
& (dL < longlat_thresh2))
if len(np.where(index == True)[0]) > 1:
# replace current coord with max match probability coord in
# duplicate list
@@ -111,7 +122,7 @@ def template_match_t(target, minrad=minrad_, maxrad=maxrad_,

return coords
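Since the diff only shows fragments of template_match_t, here is a compressed, self-contained sketch of the ring-matching idea it implements: binarize the CNN prediction, correlate it against ring templates of width rw over the minrad to maxrad range, and keep correlation peaks above template_thresh. It assumes scikit-image's match_template, circle_perimeter and peak_local_max, and it omits the duplicate-removal pass shown above; treat it as an illustration of the approach, not the repository's implementation.

import numpy as np
from skimage.draw import circle_perimeter
from skimage.feature import match_template, peak_local_max

def match_rings(target, minrad=5, maxrad=40, rw=2,
                template_thresh=0.5, target_thresh=0.1):
    target = np.where(target >= target_thresh, 1., 0.)       # binarize the CNN prediction
    coords = []
    for radius in range(minrad, maxrad + 1):
        side = 2 * (radius + rw) + 1                          # template just big enough for the ring
        template = np.zeros((side, side))
        for dr in range(rw):                                  # ring of thickness rw
            rr, cc = circle_perimeter(side // 2, side // 2, radius + dr,
                                      shape=template.shape)
            template[rr, cc] = 1.
        corr = match_template(target, template, pad_input=True)
        for row, col in peak_local_max(corr, threshold_abs=template_thresh):
            coords.append((col, row, radius))                 # (x, y, r) pixel coordinates
    return np.asarray(coords)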

#####################################

def template_match_t2c(target, csv_coords, minrad=minrad_, maxrad=maxrad_,
longlat_thresh2=longlat_thresh2_,
rad_thresh=rad_thresh_, template_thresh=template_thresh_,
@@ -181,47 +192,51 @@ def template_match_t2c(target, csv_coords, minrad=minrad_, maxrad=maxrad_,
N_csv, N_detect = len(csv_coords), len(templ_coords)
for lo, la, r in templ_coords:
csvLong, csvLat, csvRad = csv_coords.T
diff_longlat = (csvLong - lo)**2 + (csvLat - la)**2
diff_rad = abs(csvRad - r)
index = (diff_rad < max(1.01, rad_thresh * r)) & (diff_longlat < longlat_thresh2)
minr = np.minimum(r, csvRad)

dL = ((csvLong - lo)**2 + (csvLat - la)**2) / minr
dR = abs(csvRad - r)
index = ((dR < np.maximum(min_rt, rad_thresh * minr))
& (dL < longlat_thresh2))
index_True = np.where(index == True)[0]
N = len(index_True)
if N > 1:
if N > 1: # more than one csv match to extracted crater
cratervals = np.array((lo, la, r))
id_keep = index_True[0]
index[id_keep] = False
diff = np.sum((csv_coords[id_keep] - cratervals)**2)
csv_duplicates.append(csv_coords[id_keep])
for id in index_True[1:]:
dupevals = csv_coords[id]
index[id] = False
csv_duplicates.append(dupevals)
diff_ = np.sum((dupevals - cratervals)**2)
diff_ = np.sum((csv_coords[id] - cratervals)**2)
if diff_ < diff:
id_keep = id
diff = diff_
index[id_keep] = True # keep only closest match as true
csv_duplicates.append(csv_coords[id])
index[id_keep] = True # keep only closest match as true
Lo, La, R = csv_coords[id_keep].T
err_lo += abs(Lo - lo) / r
err_la += abs(La - la) / r
err_r += abs(R - r) / r
meanr = (R + r) / 2.
err_lo += abs(Lo - lo) / meanr
err_la += abs(La - la) / meanr
err_r += abs(R - r) / meanr
print("""%d GT entries matched to (%d,%d,%d) ring... counted
(%f,%f,%f) as the match.""" % (N, lo, la, r, Lo, La, R))
print(csv_duplicates)
elif N == 1:
Lo, La, R = csv_coords[index_True[0]].T
err_lo += abs(Lo - lo) / r
err_la += abs(La - la) / r
err_r += abs(R - r) / r
meanr = (R + r) / 2.
err_lo += abs(Lo - lo) / meanr
err_la += abs(La - la) / meanr
err_r += abs(R - r) / meanr
N_match += min(1, N)
# remove csv so it can't be re-matched again
csv_coords = csv_coords[np.where(index == False)]
if len(csv_coords) == 0:
break

if rmv_oor_csvs == 1:
#upper = maxr
upper = 15
lower = minrad
lower = minrad_
N_large_unmatched = len(np.where((csv_coords.T[2] > upper) |
(csv_coords.T[2] < lower))[0])
if N_large_unmatched < N_csv:
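With the counters accumulated above, the detection scores reported elsewhere in the pipeline follow directly. A hedged sketch, assuming the usual definitions (fraction of detections matched to the catalogue, fraction of catalogue craters recovered); the helper name and example numbers are illustrative, and the exact values template_match_t2c returns are not shown in this excerpt.

def detection_scores(N_match, N_csv, N_detect, beta=1):
    precision = N_match / float(N_detect) if N_detect > 0 else 0.   # matched fraction of detections
    recall = N_match / float(N_csv) if N_csv > 0 else 0.            # matched fraction of catalogue craters
    if precision + recall == 0:
        return precision, recall, 0.
    fbeta = (1 + beta**2) * precision * recall / (beta**2 * precision + recall)
    return precision, recall, fbeta

print(detection_scores(N_match=80, N_csv=100, N_detect=90))   # (0.888..., 0.8, 0.842...)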
