Commit 325db1f7 authored by Jay, committed by jay
Browse files

Subpixel df removed and added to matches, matches observable, observable base class, tests

parent 186ae49a
Loading
Loading
Loading
Loading
+34 −81
Original line number Diff line number Diff line
@@ -10,6 +10,7 @@ from autocnet.cg.cg import convex_hull_ratio
from autocnet.cg.cg import overlapping_polygon_area
from autocnet.matcher import health
from autocnet.matcher import outlier_detector as od
from autocnet.matcher import suppression_funcs as spf
from autocnet.matcher import subpixel as sp
from autocnet.transformation.transformations import FundamentalMatrix, Homography
from autocnet.vis.graph_view import plot_edge
@@ -256,10 +257,9 @@ class Edge(dict, MutableMapping):
                      without being considered an outlier
        """
        matches = self.matches
        self.subpixel_offsets = pd.DataFrame(0, index=matches.index, columns=['x_offset',
                                                                              'y_offset',
                                                                              'correlation',
                                                                              's_idx', 'd_idx'])
        for column in ['x_offset', 'y_offset', 'correlation']:
            if not column in self.matches.columns:
                self.matches[column] = 0

        # Build up a composite mask from all of the user specified masks
        if clean_keys:
@@ -287,21 +287,19 @@ class Edge(dict, MutableMapping):

            try:
                x_off, y_off, strength = sp.subpixel_offset(s_template, d_search, upsampling=upsampling)
                self.subpixel_offsets.loc[idx] = [x_off, y_off, strength,s_idx, d_idx]
                self.matches.loc[idx, ('x_offset', 'y_offset', 'correlation')] = [x_off, y_off, strength]
            except:
                warnings.warn('Template-Search size mismatch, failing for this correspondence point.')
                continue
        self.subpixel_offsets.to_sparse(fill_value=0.0)

        # Compute the mask for correlations less than the threshold
        threshold_mask = self.subpixel_offsets['correlation'] >= threshold
        threshold_mask = self.matches['correlation'] >= threshold

        # Compute the mask for the point shifts that are too large
        subp= self.subpixel_offsets
        query_string = 'x_offset <= -{0} or x_offset >= {0} or y_offset <= -{1} or y_offset >= {1}'.format(max_x_shift,
                                                                                                           max_y_shift)
        sp_shift_outliers = subp.query(query_string)
        shift_mask = pd.Series(True, index=self.subpixel_offsets.index)
        sp_shift_outliers = self.matches.query(query_string)
        shift_mask = pd.Series(True, index=self.matches.index)
        shift_mask[sp_shift_outliers.index] = False

        # Generate the composite mask and write the masks to the mask data structure
@@ -310,82 +308,37 @@ class Edge(dict, MutableMapping):
        self.masks = ('threshold', threshold_mask)
        self.masks = ('subpixel', mask)

    def suppress(self, min_radius=1, k=100, error_k=0.1):
        """
        Suppress subpixel registered points to that k +- k * error_k
        points, with good spatial distribution, remain
    def suppress(self, func=spf.correlation, clean_keys=[], **kwargs):
        if not hasattr(self, 'matches'):
            raise AttributeError('This edge does not yet have any matches computed.')

        Adds a suppression mask to the edge mask dataframe.
        # Build up a composite mask from all of the user specified masks
        if clean_keys:
            matches, mask = self._clean(clean_keys)
        else:
            matches = self.matches

        Parameters
        ----------
        min_radius : int
                     The lowest acceptable radius value for points
        domain = self.source.handle.raster_size

        k : int
            The desired number of output points
        # Massage the dataframe into the correct structure
        coords = self.source.keypoints.loc[matches['source_idx']][['x', 'y']]
        matches = matches.merge(coords, left_on=['source_idx'], right_index=True)
        matches['strength'] = self.matches.apply(func, axis=1)

        error_k : float
                  [0,1) The acceptable epsilon
        """
        xy_extent = self.source.handle.xy_extent[1]
        max_radius = min(xy_extent) / 4
        k = 100

        sp_mask = self.masks['subpixel']
        sp_values = self.subpixel_offsets[sp_mask]

        coordinates = self.source.keypoints.iloc[sp_values['s_idx']][['x', 'y']]
        merged = pd.merge(sp_values, coordinates, left_on='s_idx', how='left', right_index=True).sort_values(by='correlation')

        previous_cell_size = 0

        while True:
            r = (min_radius + max_radius) / 2
            cell_size = int(r / math.sqrt(2))

            # To prevent cycling
            if cell_size == previous_cell_size:
                break
            previous_cell_size = cell_size

            # Setup to store results
            result = []
            # Compute the bin edges and assign points to the appropriate bins
            x_edges = np.arange(0,xy_extent[0], int(xy_extent[0] / cell_size))
            y_edges = np.arange(0,xy_extent[1], int(xy_extent[1] / cell_size))
            grid = np.zeros((len(y_edges), len(x_edges)), dtype=np.bool)

            xbins = np.digitize(merged['x'], bins=x_edges)
            ybins = np.digitize(merged['y'], bins=y_edges)

            for i, (idx, p) in enumerate(merged.iterrows()):
                x_center = xbins[i]
                y_center = ybins[i]
                cell = grid[y_center-1 , x_center-1]
                if cell == False:
                    result.append(idx)
                    if len(result) > k:
                        # Search the lower half, the radius is too big
                        max_radius = r
                        break

                    # Cover the necessary cells
                    grid[y_center - 5: y_center + 5,
                         x_center - 5:x_center + 5] = True

            # Check break conditions
            if k - k * error_k < len(result) < k + k * error_k:
                break
            elif len(result) < k:
                # Search the upper half, the radius is too small
                min_radius = r
            elif abs(max_radius - min_radius) < 5:
                break

        mask = pd.Series(False, self.masks.index)
        mask.iloc[np.array(result)] = True
        if not hasattr(self, 'suppression'):
            # Instantiate the suppression object and suppress matches
            self.suppression = od.SpatialSuppression(matches, domain, **kwargs)
            self.suppression.suppress()
        else:
            for k, v in kwargs.items():
                if hasattr(self.suppression, k):
                    setattr(self.suppression, k, v)
            self.suppression.suppress()

        if clean_keys:
            mask[mask == True] = self.suppression.mask
        else:
            mask = self.suppression.mask
        self.masks = ('suppression', mask)

    def coverage_ratio(self, clean_keys=[]):
+4 −9
Original line number Diff line number Diff line
@@ -211,9 +211,7 @@ class CandidateGraph(nx.Graph):
        Parameters
        ----------
        k : int
            The number of matches, minus 1, to find per feature.  For example
            k=5 will find the 4 nearest neighbors for every extracted feature.
            If None,  k = (2 * the number of edges connecting a node) +1
            The number of matches to find per feature.
        """
        # Instantiate a single flann matcher to be resused for all nodes

@@ -224,7 +222,6 @@ class CandidateGraph(nx.Graph):
            if not hasattr(node, 'descriptors'):
                raise AttributeError('Descriptors must be extracted before matching can occur.')
            descriptors = node.descriptors

            # Load the neighbors of the current node into the FLANN matcher
            neighbors = self.neighbors(i)
            for n in neighbors:
@@ -392,9 +389,6 @@ class CandidateGraph(nx.Graph):
            if clean_keys:
                matches, mask = edge._clean(clean_keys)

            if 'subpixel' in clean_keys:
                offsets = edge.subpixel_offsets

            kp1 = self.node[source].keypoints
            kp2 = self.node[destination].keypoints
            pt_idx = 0
@@ -417,8 +411,9 @@ class CandidateGraph(nx.Graph):
                kp2y = kp2.iloc[m2_pid]['y']

                if 'subpixel' in clean_keys:
                    kp2x += offsets['x_offset'].values[i]
                    kp2y += offsets['y_offset'].values[i]
                    kp2x += row['x_offset']
                    kp2y += row['y_offset']

                values.append([kp2x,
                               kp2y,
                               m2,
+71 −17
Original line number Diff line number Diff line
@@ -7,6 +7,7 @@ from scipy.misc import bytescale
from autocnet.fileio.io_gdal import GeoDataset
from autocnet.matcher import feature_extractor as fe
from autocnet.matcher import outlier_detector as od
from autocnet.matcher import suppression_funcs as spf
from autocnet.cg.cg import convex_hull_ratio
from autocnet.utils.isis_serial_numbers import generate_serial_number
from autocnet.vis.graph_view import plot_node
@@ -45,7 +46,6 @@ class Node(dict, MutableMapping):
    def __init__(self, image_name=None, image_path=None):
        self.image_name = image_name
        self.image_path = image_path
        self._masks = set()
        self._mask_arrays = {}
        self.provenance = {}
        self._pid = 0
@@ -79,12 +79,35 @@ class Node(dict, MutableMapping):

    @property
    def masks(self):
        """
        A pandas DataFrame of boolean masks for this node's keypoints,
        one column per named mask.

        Columns listed in mask_lookup are backed by another stateful
        object (e.g. the suppression object), so they are refreshed from
        that object's ``mask`` attribute on every access.
        """
        # Maps a mask column name to the name of the attribute holding
        # the stateful object whose `.mask` series backs that column.
        mask_lookup = {'suppression': 'suppression'}
        # Lazily create the backing frame on first access.
        if not hasattr(self, '_masks'):
            self._masks = pd.DataFrame()
        # If the mask is coming from another object that tracks
        # state, dynamically draw the mask from the object.
        for c in self._masks.columns:
            if c in mask_lookup:
                self._masks[c] = getattr(self, mask_lookup[c]).mask
        return self._masks

    @masks.setter
    def masks(self, v):
        self._masks.add(v[0])
        self._mask_arrays[v[0]] = v[1]
        column_name = v[0]
        boolean_mask = v[1]
        self.masks[column_name] = boolean_mask

    @property
    def isis_serial(self):
        """
        Generate an ISIS compatible serial number using the data file
        associated with this node.  This assumes that the data file
        has a PVL header.

        The serial is computed once and cached on the instance.

        Returns
        -------
         : str or None
           The serial number, or None if one could not be generated.
        """
        if not hasattr(self, '_isis_serial'):
            try:
                self._isis_serial = generate_serial_number(self.image_path)
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are not silently swallowed; serial generation
            # remains best effort and any real failure yields None.
            except Exception:
                self._isis_serial = None
        return self._isis_serial

    def get_array(self, band=1):
        """
@@ -130,9 +153,25 @@ class Node(dict, MutableMapping):
                                      'parameters':kwargs}
        self._pid += 1

    def anms(self, nfeatures=100, robust=0.9):
        mask = od.adaptive_non_max_suppression(self.keypoints,nfeatures,robust)
        self.masks = ('anms', mask)
    def suppress(self, func=spf.response, **kwargs):
        """
        Spatially suppress the extracted keypoints and store the
        resulting boolean mask under the 'suppression' key.

        Parameters
        ----------
        func : callable
               Applied row-wise to the keypoints dataframe to compute a
               'strength' column used to rank points (default: spf.response).

        kwargs : dict
                 Forwarded to od.SpatialSuppression on first use; on later
                 calls, matching attributes of the existing suppression
                 object are updated instead.

        Raises
        ------
        AttributeError
            If keypoints have not been extracted for this node.
        """
        if not hasattr(self, 'keypoints'):
            raise AttributeError('No keypoints extracted for this node.')

        domain = self.handle.raster_size
        self.keypoints['strength'] = self.keypoints.apply(func, axis=1)

        if not hasattr(self, 'suppression'):
            # First call: instantiate the stateful suppression object.
            self.suppression = od.SpatialSuppression(self.keypoints, domain, **kwargs)
        else:
            # Subsequent calls: push any recognized keyword arguments
            # onto the existing suppression object.
            for k, v in kwargs.items():
                if hasattr(self.suppression, k):
                    setattr(self.suppression, k, v)
        # Run (or re-run) suppression with the current parameters.
        # (Hoisted out of both branches; it was duplicated in each.)
        self.suppression.suppress()

        self.masks = ('suppression', self.suppression.mask)

    def coverage_ratio(self, clean_keys=[]):
        """
@@ -159,16 +198,31 @@ class Node(dict, MutableMapping):
    def plot(self, clean_keys=[], **kwargs):  # pragma: no cover
        return plot_node(self, clean_keys=clean_keys, **kwargs)

    @property
    def isis_serial(self):
    def _clean(self, clean_keys):
        """
        Generate an ISIS compatible serial number using the data file
        associated with this node.  This assumes that the data file
        has a PVL header.
        Given a list of clean keys and a provenance id compute the
        mask of valid matches

        Parameters
        ----------
        clean_keys : list
                     of columns names (clean keys)
        pid : int
              The provenance id of the parameter set to be cleaned.
              Defaults to the last run.

        Returns
        -------
        matches : dataframe
                  A masked view of the matches dataframe

        mask : series
                    A boolean series to inflate back to the full match set
        """
        if not hasattr(self, '_isis_serial'):
            try:
                self._isis_serial = generate_serial_number(self.image_path)
            except:
                self._isis_serial = None
        return self._isis_serial
 No newline at end of file
        if not hasattr(self, 'keypoints'):
            raise AttributeError('Keypoints have not been extracted for this node.')
        panel = self.masks
        mask = panel[clean_keys].all(axis=1)
        matches = self.keypoints[mask]
        return matches, mask
+0 −6
Original line number Diff line number Diff line
@@ -50,12 +50,6 @@ class TestNode(unittest.TestCase):
        self.assertNotEqual(find_in_dict(p0, 'nfeatures'),
                            find_in_dict(p1, 'nfeatures'))

    def test_anms(self):
        image = self.node.get_array()
        self.node.extract_features(image, extractor_parameters={'nfeatures':100})
        self.node.anms(nfeatures=10)
        self.assertIn('anms', self.node.masks)
        self.assertTrue(sum(self.node._mask_arrays['anms']), 10)

    def test_isis_serial(self):
        serial = self.node.isis_serial
+148 −50
Original line number Diff line number Diff line
from collections import deque
import math

import cv2
import numpy as np
import pandas as pd

from autocnet.utils.observable import Observable

class DistanceRatio(object):

class DistanceRatio(Observable):

    """
    A stateful object to store ratio test results and provenance.
@@ -37,6 +40,9 @@ class DistanceRatio(object):
        self._observers = set()
        self.matches = matches
        self.mask = None
        self.clean_keys = None
        self.single = None
        self.attrs = ['mask', 'ratio', 'clean_keys', 'single']

    @property
    def nvalid(self):
@@ -90,66 +96,158 @@ class DistanceRatio(object):
        self._action_stack.append(state_package)
        self._current_action_stack = len(self._action_stack) - 1

    def subscribe(self, func):

class SpatialSuppression(Observable):
    """
        Subscribe some observer to the edge
    Spatial suppression using disc based method.

        Parameters
    Attributes
    ----------
        func : object
               The callable that is to be executed on update
        """
        self._observers.add(func)
    df : dataframe
         Input dataframe used for suppressing

    def _notify_subscribers(self, *args, **kwargs):
        """
        The 'update' call to notify all subscribers of
        a change.
        """
        for update_func in self._observers:
            update_func(self, *args, **kwargs)
    mask : series
           pandas boolean series

    def rollforward(self, n=1):
        """
        Roll forwards in the object history, e.g. do
    max_radius : float
                 Maximum allowable point radius

        Parameters
        ----------
        n : int
            the number of steps to roll forwards
    min_radius : float
                 The smallest allowable radius size

    nvalid : int
             The number of valid points after suppression

    k : int
        The number of points to be saved

    error_k : float
              [0,1] the acceptable error in k

    domain : tuple
             The (x,y) extent of the input domain
    """
        idx = self._current_action_stack + n
        if idx > len(self._action_stack) - 1:
            idx = len(self._action_stack) - 1
        self._current_action_stack = idx
        state = self._action_stack[idx]
        setattr(self, 'mask', state['mask'])
        setattr(self, 'ratio', state['ratio'])
        setattr(self, 'clean_keys', state['clean_keys'])
        setattr(self, 'single', state['single'])
        # Reset attributes (could also cache)
        self._notify_subscribers(self)

    def rollback(self, n=1):
    def __init__(self, df, domain, min_radius=1, k=250, error_k=0.05):
        """
        Parameters
        ----------
        df : dataframe
             Input points; must contain 'x', 'y', and 'strength' columns.

        domain : tuple
                 The (x, y) extent over which suppression occurs.

        min_radius : float
                     The smallest allowable suppression radius.

        k : int
            The desired number of points to retain.

        error_k : float
                  [0,1] the acceptable fractional error in k.

        Raises
        ------
        ValueError
            If a required column is missing from df.
        """
        columns = df.columns
        for i in ['x', 'y', 'strength']:
            if i not in columns:
                raise ValueError('The dataframe is missing a {} column.'.format(i))
        # Sort strongest-first so high-strength points are kept
        # preferentially; copy to avoid mutating the caller's frame.
        self._df = df.sort_values(by=['strength'], ascending=False).copy()
        # Upper bound for the radius bisection performed in suppress().
        self.max_radius = min(domain)
        self.min_radius = min_radius
        self.domain = domain
        # Boolean result series; populated by suppress().
        self.mask = None
        self._k = k
        self._error_k = error_k

        # Attribute names captured into the action-stack state packages.
        self.attrs = ['mask', 'k', 'error_k']

        # Bounded history of prior states and the observer registry
        # (Observable machinery).
        self._action_stack = deque(maxlen=10)
        self._current_action_stack = 0
        self._observers = set()

    @property
    def nvalid(self):
        # Number of points that survived suppression (True entries in mask).
        # NOTE(review): raises AttributeError if suppress() has not yet
        # run, since mask is initialized to None.
        return self.mask.sum()

    @property
    def k(self):
        # The target number of points to retain during suppression.
        return self._k

    @k.setter
    def k(self, v):
        # Takes effect on the next call to suppress().
        self._k = v

    @property
    def error_k(self):
        # [0,1] acceptable fractional error in the retained point count k.
        return self._error_k

    @error_k.setter
    def error_k(self, v):
        # Takes effect on the next call to suppress().
        self._error_k = v

    @property
    def df(self):
        # The strength-sorted copy of the input dataframe (no setter).
        return self._df

    def suppress(self):
        """
        Roll backward in the object histroy, e.g. undo
        Suppress subpixel registered points to that k +- k * error_k
        points, with good spatial distribution, remain

        Adds a suppression mask to the edge mask dataframe.

        Parameters
        ----------
        n : int
            the number of steps to roll backwards
        k : int
            The desired number of output points

        error_k : float
                  [0,1) The acceptable epsilon
        """
        idx = self._current_action_stack - n
        if idx < 0:
            idx = 0
        self._current_action_stack = idx
        state = self._action_stack[idx]
        setattr(self, 'mask', state['mask'])
        setattr(self, 'ratio', state['ratio'])
        setattr(self, 'clean_keys', state['clean_keys'])
        setattr(self, 'single', state['single'])
        # Reset attributes (could also cache)

        df = self.df
        if self.k > len(df):
           raise ValueError('Only {} valid points, but {} points requested'.format(len(df), self.k))
        min_radius = self.min_radius
        max_radius = self.max_radius
        while True:
            r = (min_radius + max_radius) / 2
            cell_size = int(r / math.sqrt(2))

            # Setup to store results
            result = []

            # Compute the bin edges and assign points to the appropriate bins
            x_edges = np.arange(0,self.domain[0],
                                self.domain[0] / cell_size)
            y_edges = np.arange(0,self.domain[1],
                                self.domain[1] / cell_size)
            grid = np.zeros((len(y_edges), len(x_edges)), dtype=np.bool)

            # Bin assignment
            xbins = np.digitize(df['x'], bins=x_edges)
            ybins = np.digitize(df['y'], bins=y_edges)
            bounds = True
            for i, (idx, p) in enumerate(df.iterrows()):
                x_center = xbins[i]
                y_center = ybins[i]
                cell = grid[y_center - 1 , x_center - 1]
                if cell == False:
                    result.append(idx)
                    if len(result) > self.k - self.k * self.error_k:
                        # Search the lower half, the radius is too small
                        max_radius = r
                        bounds = False
                        continue

                    # Cover the necessary cells
                    grid[y_center - 3: y_center + 3,
                         x_center - 3: x_center + 3] = True
            if bounds is False:
                continue

            #  Check break conditions
            if self.k - self.k * self.error_k < len(result) < self.k + self.k * self.error_k:
                break
            elif abs(max_radius - min_radius) < 5:
                break
            elif len(result) < self.k:
                # Search the upper half, the radius is too small
                min_radius = r

        self.mask = pd.Series(False, self.df.index)
        self.mask.loc[np.array(result)] = True

        state_package = {'mask':self.mask,
                         'k': self.k,
                         'error_k': self.error_k}

        self._action_stack.append(state_package)
        self._notify_subscribers(self)
        self._current_action_stack = len(self._action_stack) - 1  # 0 based vs. 1 based


def self_neighbors(matches):
    """
Loading