Skip to content

Commit

Permalink
multi-scale with proper blur
Browse files · Browse the repository at this point in the history
  • Loading branch information
lzx551402 committed Feb 21, 2020
1 parent b1c9c28 commit d312858
Show file tree
Hide file tree
Showing 5 changed files with 47 additions and 13 deletions.
5 changes: 3 additions & 2 deletions configs/imw2020_eval.yaml
Original file line number Diff line number Diff line change
@@ -1,18 +1,19 @@
data_name: 'imw2020'
data_split: 'val' # val or test
data_root: '/local/data/phototourism'
dump_root: '/local/imw2020_val_aslfeat_2k'
dump_root: '/local/imw2020_val_aslfeat_ms_8k'
truncate: [0, null]
model_path: 'pretrained/aslfeat/model.ckpt-380000'
overwrite: true
net:
max_dim: 2048
config:
kpt_n: 2048
kpt_n: 8000
kpt_refinement: true
deform_desc: 1
score_thld: 0.5
edge_thld: -1
multi_pass: true
multi_level: true
nms_size: 3
eof_mask: 5
Expand Down
3 changes: 2 additions & 1 deletion configs/matching_eval.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,12 @@ net:
deform_desc: 1
score_thld: 0.5
edge_thld: -1
multi_pass: true
multi_level: true
nms_size: 3
eof_mask: 5
need_norm: true
use_peakiness: true
match:
ratio_test: 0.8
ratio_test: 0.8
cross_check: true
6 changes: 3 additions & 3 deletions datasets/imw2020.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,6 @@ def _format_data(self, data):
feat = data['dump_data'][0]
kpt = data['dump_data'][1]
score = data['dump_data'][2]
_ = gen_kpt_f.create_dataset(basename, data=kpt, dtype='f')
_ = gen_desc_f.create_dataset(basename, data=feat, dtype='f')
_ = gen_score_f.create_dataset(basename, data=score, dtype='f')
_ = gen_kpt_f.create_dataset(basename, data=kpt)
_ = gen_desc_f.create_dataset(basename, data=feat)
_ = gen_score_f.create_dataset(basename, data=score)
8 changes: 6 additions & 2 deletions image_matching.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def extract_local_features(gray_list, model_path, config):
kpts = []
for gray_img in gray_list:
desc, kpt, _ = model.run_test_data(gray_img)
print('feature_num', kpt.shape[0])
descs.append(desc)
kpts.append(kpt)
return descs, kpts
Expand All @@ -57,9 +58,12 @@ def main(argv=None): # pylint: disable=unused-argument
descs[0], descs[1], kpts[0], kpts[1],
ratio=config['match']['ratio_test'], cross_check=config['match']['cross_check'],
err_thld=3, ransac=True, info='ASLFeat')

# draw matches
disp = matcher.draw_matches(rgb_list[0], kpts[0], rgb_list[1], kpts[1], match, mask)
plt.imsave('disp.jpg', disp)

output_name = 'disp.jpg'
print('image save to', output_name)
plt.imsave(output_name, disp)


if __name__ == '__main__':
Expand Down
38 changes: 33 additions & 5 deletions models/feat_model.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import sys
import math

import cv2
import numpy as np
Expand All @@ -21,15 +22,42 @@ def _run(self, data):
assert len(data.shape) == 3
max_dim = max(data.shape[0], data.shape[1])
H, W, _ = data.shape

if max_dim > self.config['max_dim']:
downsample_ratio = self.config['max_dim'] / float(max_dim)
data = cv2.resize(data, (0, 0), fx=downsample_ratio, fy=downsample_ratio)
data = data[..., np.newaxis]
feed_dict = {"input:0": np.expand_dims(data, 0)}
returns = self.sess.run(self.endpoints, feed_dict=feed_dict)
descs = np.squeeze(returns['descs'], axis=0)
kpts = np.squeeze(returns['kpts'], axis=0) * np.array([W / data.shape[1], H / data.shape[0]])
scores = np.squeeze(returns['scores'], axis=0)
data_size = data.shape

if self.config['config']['multi_pass']:
scale_f = 1 / (2**0.50)
min_scale = max(0.3, 128 / max(H, W))
n_scale = math.floor(max(math.log(min_scale) / math.log(scale_f), 1))
sigma = 0.8
else:
n_scale = 1

descs, kpts, scores = [], [], []
for i in range(n_scale):
if i > 0:
data = cv2.GaussianBlur(data, None, sigma / scale_f)
data = cv2.resize(data, dsize=None, fx=scale_f, fy=scale_f)[..., np.newaxis]

feed_dict = {"input:0": np.expand_dims(data, 0)}
returns = self.sess.run(self.endpoints, feed_dict=feed_dict)
descs.append(np.squeeze(returns['descs'], axis=0))
kpts.append(np.squeeze(returns['kpts'], axis=0) * np.array([W / data.shape[1], H / data.shape[0]], dtype=np.float32))
scores.append(np.squeeze(returns['scores'], axis=0))

descs = np.concatenate(descs, axis=0)
kpts = np.concatenate(kpts, axis=0)
scores = np.concatenate(scores, axis=0)

idxs = np.negative(scores).argsort()[0:self.config['config']['kpt_n']]

descs = descs[idxs]
kpts = kpts[idxs] * np.array([W / data_size[1], H / data_size[0]], dtype=np.float32)
scores = scores[idxs]
return descs, kpts, scores

def _construct_network(self):
Expand Down

0 comments on commit d312858

Please sign in to comment.