Commit 7169beaa authored by Jay, committed by Jason R Laura
Browse files

Refactor to support threaded concurrency. Demo in ipynb.

parent c516afc1
Loading
Loading
Loading
Loading
+51 −5
Original line number Diff line number Diff line
@@ -11,6 +11,7 @@ from autocnet.matcher import health
from autocnet.matcher import outlier_detector as od
from autocnet.matcher import suppression_funcs as spf
from autocnet.matcher import subpixel as sp
from autocnet.matcher.matcher import FlannMatcher
from autocnet.transformation.transformations import FundamentalMatrix, Homography
from autocnet.vis.graph_view import plot_edge

@@ -39,6 +40,7 @@ class Edge(dict, MutableMapping):

        self.homography = None
        self.fundamental_matrix = None
        self.matches = None
        self._subpixel_offsets = None

        self._observers = set()
@@ -80,6 +82,25 @@ class Edge(dict, MutableMapping):
    def health(self):
        return self._health.health

    def match(self, k=2):
        """
        Compute descriptor matches between this edge's source and
        destination nodes, in both directions, accumulating the results
        onto the edge via ``_add_matches``.

        Parameters
        ----------
        k : int
            Number of nearest-neighbour matches to request per query
            descriptor.
        """
        fl = FlannMatcher()
        # Match in both directions: index one node's descriptors, then
        # query with the other node's descriptors, and vice versa.
        for indexed, query in ((self.source, self.destination),
                               (self.destination, self.source)):
            fl.add(indexed.descriptors, indexed.node_id)
            fl.train()
            self._add_matches(fl.query(query.descriptors, query.node_id, k))
            fl.clear()

    def _add_matches(self, matches):
        if self.matches is None:
            self.matches = matches
        else:
            df = self.matches
            self.matches = df.append(matches, ignore_index=True)

    def symmetry_check(self):
        if hasattr(self, 'matches'):
            mask = od.mirroring_test(self.matches)
@@ -103,13 +124,28 @@ class Edge(dict, MutableMapping):
            raise AttributeError('No matches have been computed for this edge.')

    def compute_fundamental_matrix(self, clean_keys=[], method='linear', **kwargs):
        """
        Estimate the fundamental matrix (F) using the correspondences tagged to this
        edge.

        if hasattr(self, 'matches'):
            matches = self.matches
        else:

        Parameters
        ----------
        clean_keys : list
                     Of strings used to apply masks to omit correspondences

        method : {linear, nonlinear}
                 Method to use to compute F.  Linear is significantly faster at
                 the cost of reduced accuracy.

        See Also
        --------
        autocnet.transformation.transformations.FundamentalMatrix
       :
        """
        if not hasattr(self, 'matches'):
            raise AttributeError('Matches have not been computed for this edge')
            return

        matches, mask = self._clean(clean_keys)

        s_keypoints = self.source.get_keypoint_coordinates(index=matches['source_idx'],
@@ -125,6 +161,7 @@ class Edge(dict, MutableMapping):
        except:
            return


        # Convert the truncated RANSAC mask back into a full length mask
        mask[mask] = fundam_mask
        # Pass in the truncated mask to the fundamental matrix.  These are
@@ -135,6 +172,9 @@ class Edge(dict, MutableMapping):
                                                    mask=mask,
                                                    local_mask=fundam_mask)

        if method != 'linear':
            self.fundamental_matrix.refine_with_mle(method=method)

        # Subscribe the health watcher to the fundamental matrix observable
        self.fundamental_matrix.subscribe(self._health.update)
        self.fundamental_matrix._notify_subscribers(self.fundamental_matrix)
@@ -142,6 +182,12 @@ class Edge(dict, MutableMapping):
        # Set the initial state of the fundamental mask in the masks
        self.masks = ('fundamental', mask)

    def add_putative_matches(self):
        """
        Add putative matches constrained by this edge's estimated
        fundamental matrix.

        Raises
        ------
        ValueError
            If the fundamental matrix has not been computed yet.
        """
        # The original guard used hasattr(), which is always true because
        # __init__ sets fundamental_matrix = None, and raised via the
        # Python-2-style ``raise(ValueError, 'msg')`` — a TypeError at
        # runtime in Python 3.  Check for None and raise properly.
        if getattr(self, 'fundamental_matrix', None) is None:
            raise ValueError('Fundamental matrix has not been computed')

        F = self.fundamental_matrix  # NOTE(review): unused — method appears unfinished in this commit

    def compute_homography(self, method='ransac', clean_keys=[], pid=None, **kwargs):
        """
        For each edge in the (sub) graph, compute the homography
+4 −69
Original line number Diff line number Diff line
@@ -4,8 +4,6 @@ import warnings

import dill as pickle
import networkx as nx
import numpy as np
import pandas as pd

from autocnet.control.control import CorrespondenceNetwork
from autocnet.fileio import io_hdf
@@ -15,7 +13,6 @@ from autocnet.fileio.io_gdal import GeoDataset
from autocnet.graph import markov_cluster
from autocnet.graph.edge import Edge
from autocnet.graph.node import Node
from autocnet.matcher.matcher import FlannMatcher
from autocnet.vis.graph_view import plot_graph


@@ -279,76 +276,15 @@ class CandidateGraph(nx.Graph):

        hdf = None

    def match_features(self, *args, **kwargs):
        """
        For all connected edges in the graph, apply feature matching.

        All positional and keyword arguments are forwarded to each
        edge's ``match`` method (e.g. ``k``, the number of matches to
        find per feature).

        See Also
        --------
        autocnet.graph.edge.Edge.match
        """
        # The committed version forwarded *args without declaring it in
        # the signature (NameError on any call); accept and forward both
        # positional and keyword arguments.
        self.apply_func_to_edges('match', *args, **kwargs)

    def compute_clusters(self, func=markov_cluster.mcl, *args, **kwargs):
        """
@@ -528,7 +464,6 @@ class CandidateGraph(nx.Graph):
                matches = edge.matches
            self.cn.add_correspondences(edge, matches)


    def to_json_file(self, outputfile):
        """
        Write the edge structure to a JSON adjacency list
+1 −1
Original line number Diff line number Diff line
@@ -226,7 +226,7 @@ class Node(dict, MutableMapping):

        allkps = pd.DataFrame(data=clean_kps, columns=columns, index=index)

        self._keypoints = allkps.sort_values(by='response', ascending=False).head(250)
        self._keypoints = allkps.sort_values(by='response', ascending=False)

        if isinstance(in_path, str):
            hdf = None
+100 −218

File changed.

Preview size limit exceeded, changes collapsed.