Commit ef8e83c6 authored by jay's avatar jay
Browse files

Merge remote-tracking branch 'upstream/master'

parents 2bd31a11 fb6eb862
Loading
Loading
Loading
Loading
+25 −1
Original line number Diff line number Diff line
{"AS15-M-0297_SML.png": ["AS15-M-0298_SML.png", "AS15-M-0300_SML.png", "AS15-M-0299_SML.png", "AS15-M-0296_SML.png"], "AS15-M-0300_SML.png": ["AS15-M-0298_SML.png", "AS15-M-0297_SML.png", "AS15-M-0299_SML.png"], "AS15-M-0299_SML.png": ["AS15-M-0298_SML.png", "AS15-M-0297_SML.png", "AS15-M-0300_SML.png", "AS15-M-0296_SML.png"], "AS15-M-0295_SML.png": [], "AS15-M-0296_SML.png": ["AS15-M-0298_SML.png", "AS15-M-0297_SML.png", "AS15-M-0299_SML.png"], "AS15-M-0298_SML.png": ["AS15-M-0297_SML.png", "AS15-M-0300_SML.png", "AS15-M-0299_SML.png", "AS15-M-0296_SML.png"]}
 No newline at end of file
{"autocnet/examples/Apollo15/AS15-M-0297_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0298_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0300_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0299_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0296_SML.png"],
 "autocnet/examples/Apollo15/AS15-M-0300_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0298_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0297_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0299_SML.png"],
 "autocnet/examples/Apollo15/AS15-M-0299_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0298_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0297_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0300_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0296_SML.png"],
 "autocnet/examples/Apollo15/AS15-M-0295_SML.png"
 : [], 
 "autocnet/examples/Apollo15/AS15-M-0296_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0298_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0297_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0299_SML.png"],
 "autocnet/examples/Apollo15/AS15-M-0298_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0297_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0300_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0299_SML.png",
    "autocnet/examples/Apollo15/AS15-M-0296_SML.png"]}
+4 −2
Original line number Diff line number Diff line
{"AS15-M-0297_SML.png": ["AS15-M-0298_SML.png"], 
"AS15-M-0298_SML.png": ["AS15-M-0297_SML.png"]}
{"autocnet/examples/Apollo15/AS15-M-0297_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0298_SML.png"], 
 "autocnet/examples/Apollo15/AS15-M-0298_SML.png"
 : ["autocnet/examples/Apollo15/AS15-M-0297_SML.png"]}
+169 −34
Original line number Diff line number Diff line
@@ -2,10 +2,15 @@ import os

import networkx as nx
import pandas as pd
import cv2
import numpy as np

from scipy.misc import bytescale # store image array

from autocnet.control.control import C
from autocnet.fileio import io_json

from autocnet.fileio.io_gdal import GeoDataset
from autocnet.matcher import feature_extractor as fe # extract features from image

class CandidateGraph(nx.Graph):
    """
@@ -15,6 +20,12 @@ class CandidateGraph(nx.Graph):
    ----------

    Attributes
    node_counter : int
                   The number of nodes in the graph. 
    node_name_map : dict
                    The mapping of image labels (i.e. file base names) to their
                    corresponding node indices.

    ----------
    """

@@ -22,47 +33,138 @@ class CandidateGraph(nx.Graph):
        super(CandidateGraph, self).__init__(*args, **kwargs)
        self.node_counter = 0
        node_labels = {}
        self.node_name_map = {}

        # the node_name is the relative path for the image
        for node_name, node_attributes in self.nodes_iter(data=True):

            if os.path.isabs(node_name):
                node_attributes['image_name'] = os.path.basename(node_name)
                node_attributes['image_path'] = node_name
            else:
                node_attributes['image_name'] = node_name
                node_attributes['image_path'] = None
                node_attributes['image_name'] = os.path.basename(os.path.abspath(node_name))
                node_attributes['image_path'] = os.path.abspath(node_name)

            node_labels[node_attributes['image_name']] = self.node_counter
            # fill the dictionary used for relabelling nodes with relative path keys
            node_labels[node_name] = self.node_counter
            # fill the dictionary used for mapping base name to node index
            self.node_name_map[node_attributes['image_name']] = self.node_counter
            self.node_counter += 1

        nx.relabel_nodes(self, node_labels, copy=False)

    @classmethod
    def from_adjacency_file(cls, inputfile):
        """
        Alternate constructor: build a graph from a JSON adjacency file whose
        keys/values are relative or absolute image paths.

        Parameters
        ----------
        inputfile : str
                    The input file containing the graph representation

        Returns
        -------
         : object
           A Network graph object

        Examples
        --------
        >>> from autocnet.examples import get_path
        >>> inputfile = get_path('adjacency.json')
        >>> candidate_graph = network.CandidateGraph.from_adjacency_file(inputfile)
        """
        # Parse the adjacency dict and feed it straight to the constructor.
        return cls(io_json.read_json(inputfile))

    def get_name(self, nodeIndex):
        """
        Return the image name stored on the given node.

        Parameters
        ----------
        nodeIndex : int
                    The index of the node.

        Returns
        -------
         : str
           The name of the image attached to the given node.
        """
        node_attributes = self.node[nodeIndex]
        return node_attributes['image_name']

    def get_keypoints(self, nodeIndex):
        """
        Return the keypoints stored on the given node.

        Parameters
        ----------
        nodeIndex : int
                    The index of the node.

        Returns
        -------
         : list
           The list of keypoints for the given node.
        """
        node_attributes = self.node[nodeIndex]
        return node_attributes['keypoints']

    def add_image(self, *args, **kwargs):
        """
        Add an image node to the graph and advance the node counter.

        Parameters
        ----------
        *args, **kwargs
            Forwarded to networkx.Graph.add_node as the new node's attributes.
        """
        # NOTE: a bad merge previously left a stale `raise NotImplementedError`
        # here, making the implementation below unreachable; the dead raise and
        # a commented-out node_labels line have been removed.
        self.add_node(self.node_counter, *args, **kwargs)
        self.node_counter += 1

    def get_geodataset(self, nodeIndex):
        """
        Constructs a GeoDataset object from the given node image and assigns the
        dataset and its NumPy array to the 'handle' and 'image' node attributes.

        Parameters
        ----------
        nodeIndex : int
                    The index of the node.
        """
        # NOTE(review): a bad merge interleaved the old adjacency_to_json
        # method with this one, leaving two overlapping `def` lines (a syntax
        # error); only the new get_geodataset body is kept. adjacency_to_json
        # lives on as to_json_file.
        self.node[nodeIndex]['handle'] = GeoDataset(self.node[nodeIndex]['image_path'])
        # bytescale converts the raw array to uint8 for the feature extractor.
        self.node[nodeIndex]['image'] = bytescale(self.node[nodeIndex]['handle'].read_array())

    def extract_features(self, nfeatures):
        """
        Extracts features from each image in the graph and uses the result to assign the
        node attributes for 'handle', 'image', 'keypoints', and 'descriptors'.

        Parameters
        ----------
        nfeatures : int
                    The number of features to be extracted.
        """
        # Loop through the nodes (i.e. images) on the graph and fill in their
        # attributes: geo dataset (handle and image) and features
        # (keypoints and descriptors).
        # NOTE: a stray `def add_matches...` line from a bad merge was trapped
        # inside this docstring; it has been removed.
        for node, attributes in self.nodes_iter(data=True):
            self.get_geodataset(node)
            extraction_params = {'nfeatures': nfeatures}
            attributes['keypoints'], attributes['descriptors'] = fe.extract_features(attributes['image'],
                                                                                     extraction_params)

    def add_matches(self, matches):
        """
        Adds match data to a node and attributes the data to the
        appropriate edges, e.g. if A-B have a match, edge A-B is attributes
        with the pandas dataframe.
@@ -75,7 +177,6 @@ class CandidateGraph(nx.Graph):
        matches : dataframe
                  The pandas dataframe containing the matches
        """

        source_groups = matches.groupby('source_image')
        for i, source_group in source_groups:
            for j, dest_group in source_group.groupby('destination_image'):
@@ -92,6 +193,49 @@ class CandidateGraph(nx.Graph):
                else:
                    edge['matches'] = dest_group

    def compute_homography(self, source_key, destination_key, outlier_algorithm=cv2.RANSAC):
        """
        Compute the homography between the matched keypoints of two nodes.

        Parameters
        ----------
        source_key : str
                     The identifier for the source node
        destination_key : str
                          The identifier for the destination node
        outlier_algorithm : int
                            OpenCV outlier-rejection method forwarded to
                            cv2.findHomography (default cv2.RANSAC).

        Returns
        -------
         : tuple
           (transformation matrix, bad entry mask) as returned by
           cv2.findHomography. The tuple is ('', '') if there is no edge
           between the nodes, or the edge has no 'matches' dataframe.
        """
        # Guard clauses replace the original deeply nested if/else pyramid.
        if not self.has_edge(source_key, destination_key):
            return ('', '')
        try:
            edge = self[source_key][destination_key]
        except KeyError:  # was a bare except; only a missing key can fail here
            edge = self[destination_key][source_key]
        if 'matches' not in edge:
            return ('', '')

        source_keypoints = []
        destination_keypoints = []
        for _, row in edge['matches'].iterrows():
            src_kp = self.node[source_key]['keypoints'][int(row['source_idx'])]
            dst_kp = self.node[destination_key]['keypoints'][int(row['destination_idx'])]
            source_keypoints.append([src_kp.pt[0], src_kp.pt[1]])
            destination_keypoints.append([dst_kp.pt[0], dst_kp.pt[1]])
        # 5.0 is the reprojection-error threshold (pixels) for the outlier test.
        return cv2.findHomography(np.array(source_keypoints),
                                  np.array(destination_keypoints),
                                  outlier_algorithm, 5.0)

    def to_cnet(self):
        """
        Generate a control network (C) object from a graph
@@ -154,28 +298,19 @@ class CandidateGraph(nx.Graph):

        return merged_cnet

    def to_json_file(self, outputfile):
        """
        Write the edge structure to a JSON adjacency list.

        Parameters
        ----------
        outputfile : str
                     PATH where the JSON will be written
        """
        # NOTE(review): a bad merge interleaved the removed classmethod
        # from_adjacency with this method, leaving two overlapping `def`
        # lines (a syntax error); only the new to_json_file is kept.
        # from_adjacency was renamed from_adjacency_file earlier in the class.
        adjacency_dict = {}
        for n in self.nodes():
            adjacency_dict[n] = self.neighbors(n)
        io_json.write_json(adjacency_dict, outputfile)

+30 −10
Original line number Diff line number Diff line
import os
import sys
sys.path.insert(0, os.path.abspath('..'))

import cv2
import numpy as np
import unittest

from autocnet.examples import get_path

import sys
sys.path.insert(0, os.path.abspath('..'))

from .. import network


class TestCandidateGraph(unittest.TestCase):
    # NOTE(review): this class appears to be a bad merge — several methods
    # contain both the pre-merge and post-merge statements (flagged below).
    # Code is left byte-identical here; only comments were added.

    def setUp(self):
        # Build the fixture graph from the example adjacency file.
        # NOTE(review): duplicate assignment — the first line calls the old
        # from_adjacency() API and is immediately overwritten by the second;
        # the stale first line should probably be deleted (confirm against
        # network.py, which now only defines from_adjacency_file).
        self.graph = network.CandidateGraph.from_adjacency(get_path('adjacency.json'))
        self.graph = network.CandidateGraph.from_adjacency_file(get_path('adjacency.json'))

    def test_get_name(self):
        # Round-trip: base name -> node index -> name.
        node_number = self.graph.node_name_map['AS15-M-0297_SML.png']
        name = self.graph.get_name(node_number)
        self.assertEquals(name, 'AS15-M-0297_SML.png')

    def test_add_image(self):
        # NOTE(review): merge artifact — the assertRaises block tests the old
        # NotImplementedError behavior, while the assertEqual below tests the
        # new implemented add_image(); both cannot be correct at once.
        with self.assertRaises(NotImplementedError):
            self.graph.add_image()
        self.assertEqual(self.graph.node_counter, 7)

    def test_adjacency_to_json(self):
        # NOTE(review): old-API test left behind by the merge; superseded by
        # test_to_json_file below.
        self.graph.adjacency_to_json('test_adjacency_to_json.json')
        self.assertTrue(os.path.exists('test_adjacency_to_json.json'))
    def test_to_json_file(self):
        # Serialize the adjacency structure and check the output file exists.
        self.graph.to_json_file('test_graph_to_json.json')
        self.assertTrue(os.path.exists('test_graph_to_json.json'))

    def test_extract_features(self):
        # also tests get_geodataset() and get_keypoints
        self.graph.extract_features(10)
        node_number = self.graph.node_name_map['AS15-M-0297_SML.png']
        node = self.graph.node[node_number]
        self.assertEquals(len(node['image']), 1012)
        self.assertEquals(len(node['keypoints']), 10)
        self.assertEquals(len(node['descriptors']), 10)
        self.assertIsInstance(node['keypoints'][0], type(cv2.KeyPoint()))
        self.assertIsInstance(node['descriptors'][0], np.ndarray)
        self.assertEquals(self.graph.get_keypoints(node_number), node['keypoints'])


    def tearDown(self):
        # Best-effort cleanup of files written by the JSON serialization tests.
        try:
            os.remove('test_adjacency_to_json.json')
            os.remove('test_graph_to_json.json')
        except:
            pass
+100 −0
Original line number Diff line number Diff line
@@ -169,3 +169,103 @@ class FlannMatcher(object):
                                              'destination_image', 'destination_idx',
                                              'distance'])

class OutlierDetector(object):
    """
    A class which contains several outlier detection methods which all return
    True/False masks as pandas data series, which can be used as masks for
    the "matches" pandas dataframe which stores match information for each
    edge of the graph.
    """

    def __init__(self):
        pass

    # (query only takes care of literal self-matches on a keypoint basis, not self-matches for the whole image)
    def self_neighbors(self, matches):
        """
        Returns a pandas data series intended to be used as a mask. Each row
        is True if it is not matched to a point in the same image (good) and
        False if it is (bad.)

        Parameters
        ----------
        matches : dataframe
                  the matches dataframe stored along the edge of the graph
                  containing matched points with columns containing:
                  matched image name, query index, train index, and
                  descriptor distance

        Returns
        -------
        : dataseries
          Intended to mask the matches dataframe. True means the row is not
          matched to a point in the same image and False means the row is.
        """
        return matches.source_image != matches.destination_image

    def distance_ratio(self, matches, ratio=0.8):
        """
        Compute and return a mask for the matches dataframe using Lowe's
        ratio test: for each source keypoint, keep only the best match and
        only if it is distinctly better than the second-best.

        Parameters
        ----------
        matches : dataframe
                  the matches dataframe stored along the edge of the graph
                  containing matched points with columns containing:
                  matched image name, query index, train index, and
                  descriptor distance. ***Will only work as expected if
                  matches already has dropped duplicates***

        ratio : float
                the ratio between the first and second-best match distances
                for each keypoint to use as a bound for marking the first
                keypoint as "good." 0.8 is the value from Lowe's paper.

        Returns
        -------
         : dataseries
           Intended to mask the matches dataframe. Rows are True if the
           associated keypoint passes the ratio test and False otherwise.
           Keypoints without more than one match are True by default, since
           the ratio test will not work for them.
        """
        # NOTE(review): assumes each group's rows are ordered best-first by
        # distance (iloc[0]/iloc[1]) — TODO confirm against the matcher that
        # produces the dataframe. The returned Series uses a fresh integer
        # index, not the matches index.
        mask = []
        temp_matches = matches.drop_duplicates()  # don't want to deal with duplicates
        for _, group in temp_matches.groupby('source_idx'):
            group_size = len(group)
            if group_size < 2:
                # Ratio test needs at least two candidates; keep by default.
                mask.append(True)
            elif group['distance'].iloc[0] < ratio * group['distance'].iloc[1]:
                # Best match is distinctly better: keep it, drop the rest.
                # BUGFIX: the original wrote range(len(group['distance']-1)),
                # which subtracts 1 element-wise (length unchanged) and so
                # appended one extra False per group, desynchronizing the mask.
                mask.append(True)
                mask.extend([False] * (group_size - 1))
            else:
                # Ambiguous best match: drop the whole group.
                mask.extend([False] * group_size)
        return pd.Series(mask)

    def mirroring_test(self, matches):
        """
        Compute and return a mask for the matches dataframe on each edge of the graph which
        will keep only entries in which there is both a source -> destination match and a destination ->
        source match.

        Parameters
        ----------
        matches : dataframe
                  the matches dataframe stored along the edge of the graph
                  containing matched points with columns containing:
                  matched image name, query index, train index, and
                  descriptor distance

        Returns
        -------
         : dataseries
           Mask over the matches dataframe. NOTE(review): as implemented this
           is pandas duplicated(keep='first') — it marks the SECOND and later
           occurrences of an identical row True and the first False, which
           does not match the stated "both rows True" contract unless callers
           rely on exactly this duplicate-marking behavior; confirm intent
           before changing.
        """
        return matches.duplicated(keep='first')
Loading