Commit 52080f2a authored by Jay, committed by Jason R Laura
Browse files

Adds the ability to deepen a network and write the network object.

parent c5bb5037
Loading
Loading
Loading
Loading
+7 −12
Original line number Diff line number Diff line
@@ -13,6 +13,7 @@ from autocnet.fileio.io_gdal import GeoDataset
from autocnet.graph import markov_cluster
from autocnet.graph.edge import Edge
from autocnet.graph.node import Node
from autocnet.matcher.add_depth import deepen_correspondences
from autocnet.vis.graph_view import plot_graph


@@ -460,28 +461,22 @@ class CandidateGraph(nx.Graph):
            filelist.append(node.image_path)
        return filelist

    def generate_cnet(self, clean_keys=[]):
    def generate_cnet(self, *args, deepen=False, **kwargs):
        """
        Compute (or re-compute) a CorrespondenceNetwork attribute

        Parameters
        ----------
        clean_keys : list
                     of string clean keys to mask correspondences
        deepen : bool
                 Whether or not to attempt to punch through correspondences.  Default: False

        See Also
        --------
        autocnet.control.control.CorrespondenceNetwork
        autocnet.graph.node.Node

        """
        self.cn = CorrespondenceNetwork()

        for s, d, edge in self.edges_iter(data=True):
            if clean_keys:
                matches, _ = edge._clean(clean_keys)
            else:
                matches = edge.matches
            self.cn.add_correspondences(edge, matches)
        for i, n in self.nodes_iter(data=True):
            n.group_correspondences(self, *args, deepen=deepen, **kwargs)

    def to_json_file(self, outputfile):
        """
+91 −1
Original line number Diff line number Diff line
from collections import MutableMapping
from collections import defaultdict, MutableMapping
import os
import warnings

@@ -7,8 +7,10 @@ import pandas as pd
from scipy.misc import bytescale

from autocnet.cg import cg
from autocnet.control.control import Correspondence, Point
from autocnet.fileio.io_gdal import GeoDataset
from autocnet.fileio import io_hdf
from autocnet.matcher.add_depth import deepen_correspondences
from autocnet.matcher import feature_extractor as fe
from autocnet.matcher import outlier_detector as od
from autocnet.matcher import suppression_funcs as spf
@@ -48,6 +50,7 @@ class Node(dict, MutableMapping):
        self.image_path = image_path
        self.node_id = node_id
        self._mask_arrays = {}
        self.point_to_correspondence = defaultdict(set)

    def __repr__(self):
        return """
@@ -302,6 +305,93 @@ class Node(dict, MutableMapping):
        if isinstance(out_path, str):
            hdf = None

    def group_correspondences(self, cg, *args, clean_keys=['fundamental'], deepen=False, **kwargs):
        """
        Group this node's matches across all incident edges into Point
        objects, populating ``self.point_to_correspondence`` with
        (image, Correspondence) pairs keyed by Point.

        Matches from every incident edge are concatenated and grouped by
        this node's keypoint index (``source_idx``); each group becomes one
        Point.  Optionally, the correspondence is then "deepened": projected
        via the fundamental matrix onto incident edges that did not already
        contain a match for that keypoint.

        Parameters
        ----------
        cg : object
             The graph object this node is a member of

        clean_keys : list
                     of string mask names passed to ``edge._clean`` to
                     select which matches participate in grouping

        deepen : bool
                 If True, attempt to punch matches through to all incident
                 edges using deepen_correspondences.  Default: False

        *args, **kwargs : accepted for call-site flexibility; not used here
        """
        node = self.node_id
        # Edges incident to this node; intersecting with the full edge set
        # presumably filters to the graph's canonical (source, destination)
        # orientation — TODO confirm against cg.edges() semantics.
        incident_edges = set(cg.edges(node)).intersection(set(cg.edges()))

        # If this node is free floating (no incident edges), ignore it.
        if not incident_edges:
            # TODO: Add dangling correspondences to control network anyway.  Subgraphs handle this segmentation if req.
            return

        # Grab all the incident edge matches (after masking with clean_keys)
        # and concatenate into a single group match table.  All rows share
        # the same source node.
        edge_matches = []
        for e in incident_edges:
            edge = cg[e[0]][e[1]]
            matches, mask = edge._clean(clean_keys=clean_keys)
            # mask is unused here; only the cleaned matches are kept
            edge_matches.append(matches)
        d = pd.concat(edge_matches)

        # Counter for point identifiers; incremented once per source keypoint
        pid = 0

        # Iterate through all of the correspondences, one group per source
        # keypoint index, and attempt to add additional correspondences
        # using the epipolar constraint.
        for idx, g in d.groupby('source_idx'):
            # Pull the source index to be used as the search key for deepening
            source_idx = g['source_idx'].values[0]

            # Create the point object this group's correspondences attach to
            point = Point(pid)

            covered_edges = list(map(tuple, g[['source_image', 'destination_image']].values))
            # The reference edge that we are deepening with (first edge in the group)
            ab = cg.edge[covered_edges[0][0]][covered_edges[0][1]]

            # Coordinates of the source keypoints for every row in the group
            ab_keypoints = ab.source.get_keypoint_coordinates(index=g['source_idx'])
            ab_x = None

            for j, (r_idx, r) in enumerate(g.iterrows()):
                kp = ab_keypoints.iloc[j].values

                # Homogenize the coord used for epipolar projection; only the
                # first row's keypoint is used as the search coordinate
                if ab_x is None:
                    ab_x = np.array([kp[0], kp[1], 1.])

                # NOTE(review): this indexes the whole group's destination
                # indices but always takes element [0], so every row in the
                # group reuses the first row's destination coordinate —
                # looks like it should be r['destination_idx'] or .values[j];
                # confirm intended behavior.
                kpd = ab.destination.get_keypoint_coordinates(index=g['destination_idx']).values[0]

                # Add the existing source and destination correspondences
                self.point_to_correspondence[point].add((r['source_image'],
                                                                  Correspondence(r['source_idx'],
                                                                                 kp[0],
                                                                                 kp[1],
                                                                                 serial=self.isis_serial)))
                self.point_to_correspondence[point].add((r['destination_image'],
                                                                  Correspondence(r['destination_idx'],
                                                                                 kpd[0],
                                                                                 kpd[1],
                                                                                 serial=cg.node[r['destination_image']].isis_serial)))

            # If the user wants to punch correspondences through to the
            # incident edges that did not already match this keypoint
            if deepen:
                search_edges = incident_edges.difference(set(covered_edges))
                for search_edge in search_edges:
                    bc = cg.edge[search_edge[0]][search_edge[1]]
                    coords, idx = deepen_correspondences(ab_x, bc, source_idx)

                    # coords is None when no candidate passed the epipolar
                    # distance threshold on that edge
                    if coords is not None:
                        cg.node[node].point_to_correspondence[point].add((search_edge[1],
                                                                          Correspondence(idx,
                                                                                         coords[0],
                                                                                         coords[1],
                                                                                         serial=cg.node[search_edge[1]].isis_serial)))

            pid += 1

    def suppress(self, func=spf.response, **kwargs):
        if not hasattr(self, '_keypoints'):
            raise AttributeError('No keypoints extracted for this node.')
+71 −0
Original line number Diff line number Diff line
import numpy as np

from autocnet.utils.utils import normalize_vector


def deepen_correspondences(ab_kp, bc, source_idx,
                           clean_keys=None,
                           geometric_threshold=2):
    """
    Given a source correspondence, use the fundamental matrix of edge ``bc``
    to search for an additional correspondence along the epipolar line.

    The algorithm functions by projecting the homogeneous source point into
    the destination image of ``bc`` as an epipolar line, then testing every
    previously matched-but-unaccepted candidate sharing ``source_idx``
    against that line.  Should a previously omitted point fulfill the
    geometric constraint, it is returned so the caller can add it to the
    currently valid set.

    Parameters
    ----------
    ab_kp : ndarray
            (3,) homogeneous point that is projected to an epipolar line
            in the destination image of bc

    bc : object
         Edge object with points that are searched along
         the epipolar line defined by ab

    source_idx : int
                 Index into bc.matches identifying candidate matches

    clean_keys : list
                 Unused; retained for backward API compatibility.
                 (Previously defaulted to ['fundamental'], a mutable
                 default, but was never read.)

    geometric_threshold : float
                          The maximum projection error, in pixels, a point can be
                          from the corresponding epipolar line to still be considered
                          an inlier.

    Returns
    -------
    coords : ndarray or None
             (3,) homogeneous coordinates of the accepted destination
             keypoint, or None when no candidate passes the threshold

    idx : object or None
          Index label (``.name``) of the accepted row in bc.matches,
          or None when no candidate passes
    """
    # Grab F for reprojection
    f_matrix = bc.fundamental_matrix

    # Compute the epipolar line projecting point ab into bc
    epipolar_line = normalize_vector(ab_kp.dot(f_matrix.T))

    # Candidate matches that share the source index; these are points that
    # were matched but previously removed by some other check.
    bc_candidates = bc.matches[(bc.matches['source_idx'] == source_idx)]
    if len(bc_candidates) == 0:
        # No candidates at all for this keypoint on this edge
        return None, None

    # Homogenize the candidate destination coordinates
    bc_candidate_coords = np.empty((len(bc_candidates), 3))
    bc_candidate_coords[:, -1] = 1.
    bc_candidate_coords[:, :2] = bc.destination.get_keypoint_coordinates(index=bc_candidates['destination_idx']).values

    # Point-to-epipolar-line distances (line is normalized, so the dot
    # product is the perpendicular pixel distance)
    bc_distance = np.abs(epipolar_line.dot(bc_candidate_coords.T))

    # Candidates within the geometric threshold of the epipolar line
    second_order_candidates = np.where(bc_distance < geometric_threshold)[0]

    # In testing, every single valid second order candidate has a single, duplicated entry.
    # That is, the correspondence has passed symmetry, but failed some other check.  Therefore,
    # an additional descriptor distance check is omitted here.
    if len(second_order_candidates) > 0:
        first = second_order_candidates[0]
        new_match = bc_candidates.iloc[first]
        coords = bc_candidate_coords[first]
        return coords, new_match.name
    return None, None