From 3b597d76803c66ad507f686771947aad0cd55676 Mon Sep 17 00:00:00 2001
From: Anton Bushuiev
Date: Mon, 24 Oct 2022 23:23:42 +0200
Subject: [PATCH 1/4] Extend to custom keypoints (prints should be removed)

---
 match_pairs.py       | 23 ++++++++++++++++++++++-
 models/matching.py   | 10 ++++++++--
 models/superpoint.py | 15 ++++++++++++---
 3 files changed, 42 insertions(+), 6 deletions(-)

diff --git a/match_pairs.py b/match_pairs.py
index 7079687c..0fb7a151 100755
--- a/match_pairs.py
+++ b/match_pairs.py
@@ -50,6 +50,7 @@
 import numpy as np
 import matplotlib.cm as cm
 import torch
+import pickle
 
 from models.matching import Matching
@@ -77,6 +78,13 @@
         '--output_dir', type=str, default='dump_match_pairs/',
         help='Path to the directory in which the .npz results and optionally,'
             'the visualization images are written')
+    parser.add_argument(
+        '--input_points', type=str, default=None,
+        help='Path to the directory in which the .pkl files with optional custom'
+            ' keypoints are stored. Each file should be named <stem0>_<stem1>.pkl'
+            ' after the names in the input_pairs file (the same naming scheme as'
+            ' output_dir). Each file should contain a pair of [n_points x 2]'
+            ' integer tensors storing the coordinates of the custom keypoints.')
 
     parser.add_argument(
         '--max_length', type=int, default=-1,
@@ -269,9 +277,22 @@
             exit(1)
         timer.update('load_image')
 
+        # Load the optional custom points
+        if opt.input_points is not None:
+            input_points_dir = Path(opt.input_points)
+            with open(input_points_dir / f'{stem0}_{stem1}.pkl', 'rb') as f:
+                pts0, pts1 = pickle.load(f)
+        else:
+            pts0, pts1 = None, None
+
         if do_match:
             # Perform the matching.
-            pred = matching({'image0': inp0, 'image1': inp1})
+            matching_args = {'image0': inp0, 'image1': inp1}
+            if pts0 is not None:
+                matching_args['points0'] = pts0
+            if pts1 is not None:
+                matching_args['points1'] = pts1
+            pred = matching(matching_args)
             pred = {k: v[0].cpu().numpy() for k, v in pred.items()}
             kpts0, kpts1 = pred['keypoints0'], pred['keypoints1']
             matches, conf = pred['matches0'], pred['matching_scores0']
diff --git a/models/matching.py b/models/matching.py
index 5d174208..3b0f5acd 100644
--- a/models/matching.py
+++ b/models/matching.py
@@ -63,10 +63,16 @@ def forward(self, data):
 
         # Extract SuperPoint (keypoints, scores, descriptors) if not provided
         if 'keypoints0' not in data:
-            pred0 = self.superpoint({'image': data['image0']})
+            superpoint_args0 = {'image': data['image0']}
+            if 'points0' in data:
+                superpoint_args0['points'] = data['points0']
+            pred0 = self.superpoint(superpoint_args0)
             pred = {**pred, **{k+'0': v for k, v in pred0.items()}}
         if 'keypoints1' not in data:
-            pred1 = self.superpoint({'image': data['image1']})
+            superpoint_args1 = {'image': data['image1']}
+            if 'points1' in data:
+                superpoint_args1['points'] = data['points1']
+            pred1 = self.superpoint(superpoint_args1)
             pred = {**pred, **{k+'1': v for k, v in pred1.items()}}
 
         # Batch all features
diff --git a/models/superpoint.py b/models/superpoint.py
index b837d938..faa19d06 100644
--- a/models/superpoint.py
+++ b/models/superpoint.py
@@ -144,6 +144,8 @@ def __init__(self, config):
 
     def forward(self, data):
         """ Compute keypoints, scores, descriptors for image """
+        print('print(data[\'image\'].shape)')
+        print(data['image'].shape)
         # Shared Encoder
         x = self.relu(self.conv1a(data['image']))
         x = self.relu(self.conv1b(x))
@@ -162,14 +164,21 @@ def forward(self, data):
         scores = self.convPb(cPa)
         scores = torch.nn.functional.softmax(scores, 1)[:, :-1]
         b, _, h, w = scores.shape
+        print('print(scores.shape)')
+        print(scores.shape)
         scores = scores.permute(0, 2, 3, 1).reshape(b, h, w, 8, 8)
         scores = scores.permute(0, 1, 3, 2, 4).reshape(b, h*8, w*8)
         scores = simple_nms(scores, self.config['nms_radius'])
 
         # Extract keypoints
-        keypoints = [
-            torch.nonzero(s > self.config['keypoint_threshold'])
-            for s in scores]
+        if 'points' not in data:
+            keypoints = [
+                torch.nonzero(s > self.config['keypoint_threshold'])
+                for s in scores]
+        else:
+            keypoints = [data['points'].long()]
+        print(scores.shape)
+        print(keypoints[0].shape)
         scores = [s[tuple(k.t())] for s, k in zip(scores, keypoints)]
 
         # Discard keypoints near the image borders

From 8e6cb5523caccf29a5d9151e1bd6b572ee15501a Mon Sep 17 00:00:00 2001
From: Anton Bushuiev
Date: Sun, 30 Oct 2022 16:14:28 +0100
Subject: [PATCH 2/4] Fix bug with name collisions

---
 match_pairs.py | 25 ++++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/match_pairs.py b/match_pairs.py
index 0fb7a151..d14dcec4 100755
--- a/match_pairs.py
+++ b/match_pairs.py
@@ -63,6 +63,15 @@
 torch.set_grad_enabled(False)
 
 
+def pair_names_to_id(names):
+    # Remove extension
+    names = [str(Path(name).with_suffix('')) for name in names]
+    # Replace '/'
+    names = [name.replace('/', '__') for name in names]
+    # Concat
+    return names[0] + '___' + names[1]
+
+
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
         description='Image pair matching and pose evaluation with SuperGlue',
@@ -217,11 +226,11 @@
     for i, pair in enumerate(pairs):
         name0, name1 = pair[:2]
         stem0, stem1 = Path(name0).stem, Path(name1).stem
-        matches_path = output_dir / '{}_{}_matches.npz'.format(stem0, stem1)
-        eval_path = output_dir / '{}_{}_evaluation.npz'.format(stem0, stem1)
-        viz_path = output_dir / '{}_{}_matches.{}'.format(stem0, stem1, opt.viz_extension)
-        viz_eval_path = output_dir / \
-            '{}_{}_evaluation.{}'.format(stem0, stem1, opt.viz_extension)
+        pair_id = pair_names_to_id((name0, name1))
+        matches_path = output_dir / f'{pair_id}_matches.npz'
+        eval_path = output_dir / f'{pair_id}_evaluation.npz'
+        viz_path = output_dir / f'{pair_id}_matches.{opt.viz_extension}'
+        viz_eval_path = output_dir / f'{pair_id}_evaluation.{opt.viz_extension}'
 
         # Handle --cache logic.
         do_match = True
@@ -280,7 +289,7 @@
         # Load the optional custom points
         if opt.input_points is not None:
             input_points_dir = Path(opt.input_points)
-            with open(input_points_dir / f'{stem0}_{stem1}.pkl', 'rb') as f:
+            with open(input_points_dir / f'{pair_id}.pkl', 'rb') as f:
                 pts0, pts1 = pickle.load(f)
         else:
             pts0, pts1 = None, None
@@ -296,11 +305,13 @@
             # Perform the matching.
             pred = matching(matching_args)
             pred = {k: v[0].cpu().numpy() for k, v in pred.items()}
             kpts0, kpts1 = pred['keypoints0'], pred['keypoints1']
             matches, conf = pred['matches0'], pred['matching_scores0']
+            matches1, conf1 = pred['matches1'], pred['matching_scores1']
             timer.update('matcher')
 
             # Write the matches to disk.
             out_matches = {'keypoints0': kpts0, 'keypoints1': kpts1,
-                           'matches': matches, 'match_confidence': conf}
+                           'matches0': matches, 'match_confidence0': conf,
+                           'matches1': matches1, 'match_confidence1': conf1}
             np.savez(str(matches_path), **out_matches)
 
         # Keep the matching keypoints.
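
The .pkl files consumed by the new --input_points option can be produced with a few lines of Python. The sketch below is illustrative only: the pair names and keypoint values are made up, and the helper simply mirrors pair_names_to_id() from PATCH 2/4 so that the file name matches what match_pairs.py looks for. Since the custom points replace the torch.nonzero output inside SuperPoint, they are [n_points x 2] integer tensors in (row, col) order.

    import pickle
    from pathlib import Path

    import torch


    def pair_names_to_id(names):
        # Same naming scheme as pair_names_to_id() in match_pairs.py (PATCH 2/4)
        names = [str(Path(name).with_suffix('')) for name in names]
        names = [name.replace('/', '__') for name in names]
        return names[0] + '___' + names[1]


    # Hypothetical pair, as it would appear in the input_pairs file
    name0, name1 = 'scene0/image0.jpg', 'scene0/image1.jpg'

    # Dummy custom keypoints: [n_points x 2] integer tensors, (row, col) order
    pts0 = torch.tensor([[10, 20], [120, 48], [300, 275]], dtype=torch.long)
    pts1 = torch.tensor([[14, 22], [118, 50], [295, 270]], dtype=torch.long)

    out_dir = Path('custom_points')  # directory later passed via --input_points
    out_dir.mkdir(parents=True, exist_ok=True)
    with open(out_dir / f'{pair_names_to_id((name0, name1))}.pkl', 'wb') as f:
        pickle.dump((pts0, pts1), f)
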
From 1b93c6e5c9918bb072b59d88b15a846ec9ad6e9c Mon Sep 17 00:00:00 2001
From: Anton Bushuiev
Date: Sun, 30 Oct 2022 16:15:31 +0100
Subject: [PATCH 3/4] Add padding to operate on any resolution

---
 models/superpoint.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/models/superpoint.py b/models/superpoint.py
index faa19d06..2c0050c7 100644
--- a/models/superpoint.py
+++ b/models/superpoint.py
@@ -144,10 +144,14 @@ def __init__(self, config):
 
     def forward(self, data):
         """ Compute keypoints, scores, descriptors for image """
-        print('print(data[\'image\'].shape)')
-        print(data['image'].shape)
+        x = data['image']
+        original_shape = x.shape
+
+        h_pad = (8 - original_shape[2] % 8) % 8
+        w_pad = (8 - original_shape[3] % 8) % 8
+        x = nn.functional.pad(x, (0, w_pad, 0, h_pad))
         # Shared Encoder
-        x = self.relu(self.conv1a(data['image']))
+        x = self.relu(self.conv1a(x))
         x = self.relu(self.conv1b(x))
         x = self.pool(x)
         x = self.relu(self.conv2a(x))
@@ -164,8 +168,6 @@ def forward(self, data):
         scores = self.convPb(cPa)
         scores = torch.nn.functional.softmax(scores, 1)[:, :-1]
         b, _, h, w = scores.shape
-        print('print(scores.shape)')
-        print(scores.shape)
         scores = scores.permute(0, 2, 3, 1).reshape(b, h, w, 8, 8)
         scores = scores.permute(0, 1, 3, 2, 4).reshape(b, h*8, w*8)
         scores = simple_nms(scores, self.config['nms_radius'])
@@ -177,11 +179,12 @@ def forward(self, data):
                 for s in scores]
         else:
             keypoints = [data['points'].long()]
-        print(scores.shape)
-        print(keypoints[0].shape)
+
         scores = [s[tuple(k.t())] for s, k in zip(scores, keypoints)]
 
         # Discard keypoints near the image borders
+        # print(keypoints)
+        # print(scores)
         keypoints, scores = list(zip(*[
             remove_borders(k, s, self.config['remove_borders'], h*8, w*8)
             for k, s in zip(keypoints, scores)]))

From cb0842ec3ca97a1f301476ad88edb0e889539773 Mon Sep 17 00:00:00 2001
From: Anton Bushuiev
Date: Sun, 30 Oct 2022 19:56:41 +0100
Subject: [PATCH 4/4] Minor refactoring

---
 models/superpoint.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/models/superpoint.py b/models/superpoint.py
index 2c0050c7..9b4b8579 100644
--- a/models/superpoint.py
+++ b/models/superpoint.py
@@ -144,12 +144,13 @@ def __init__(self, config):
 
     def forward(self, data):
         """ Compute keypoints, scores, descriptors for image """
+        # Pad to a multiple of 8
         x = data['image']
         original_shape = x.shape
-
         h_pad = (8 - original_shape[2] % 8) % 8
         w_pad = (8 - original_shape[3] % 8) % 8
         x = nn.functional.pad(x, (0, w_pad, 0, h_pad))
+
         # Shared Encoder
         x = self.relu(self.conv1a(x))
         x = self.relu(self.conv1b(x))
@@ -183,8 +184,6 @@ def forward(self, data):
         scores = [s[tuple(k.t())] for s, k in zip(scores, keypoints)]
 
         # Discard keypoints near the image borders
-        # print(keypoints)
-        # print(scores)
         keypoints, scores = list(zip(*[
             remove_borders(k, s, self.config['remove_borders'], h*8, w*8)
             for k, s in zip(keypoints, scores)]))
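
Taken together, the four patches allow matching at arbitrary resolutions and with caller-supplied keypoints passed through the new 'points0'/'points1' keys. Below is a minimal usage sketch, not part of the patches themselves: it assumes the patched repository is importable with its pretrained weights in place, and the image tensors and keypoint coordinates are dummies.

    import torch

    from models.matching import Matching

    torch.set_grad_enabled(False)

    # Dummy grayscale pair in [0, 1]; sizes no longer need to be multiples of 8
    image0 = torch.rand(1, 1, 479, 643)
    image1 = torch.rand(1, 1, 481, 640)

    # Optional custom keypoints: [n_points x 2] integer tensors, (row, col) order
    points0 = torch.tensor([[100, 200], [250, 330], [400, 510]])
    points1 = torch.tensor([[102, 198], [255, 325], [405, 500]])

    matching = Matching({'superglue': {'weights': 'indoor'}}).eval()

    pred = matching({'image0': image0, 'image1': image1,
                     'points0': points0, 'points1': points1})
    matches0 = pred['matches0'][0]          # index into keypoints1, -1 if unmatched
    scores0 = pred['matching_scores0'][0]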