Commit 99764933 authored by Jay's avatar Jay Committed by jay
Browse files

Refactor keypoints to get_keypoints to encapsulate positional indexing

parent 51d02e21
Loading
Loading
Loading
Loading
+22 −24
Original line number Diff line number Diff line
@@ -110,26 +110,26 @@ class Edge(dict, MutableMapping):
        else:
            raise AttributeError('Matches have not been computed for this edge')

        all_source_keypoints = self.source.keypoints.iloc[matches['source_idx']]
        all_destin_keypoints = self.destination.keypoints.iloc[matches['destination_idx']]
        all_source_keypoints = self.source.get_keypoint_coordinates(matches['source_idx'])
        all_destin_keypoints = self.destination.get_keypoint_coordinates(matches['destination_idx'])

        matches, mask = self._clean(clean_keys)

        s_keypoints = self.source.keypoints.iloc[matches['source_idx'].values]
        d_keypoints = self.destination.keypoints.iloc[matches['destination_idx'].values]
        s_keypoints = self.source.get_keypoint_coordinates(matches['source_idx'].values)
        d_keypoints = self.destination.get_keypoint_coordinates(matches['destination_idx'].values)

        transformation_matrix, fundam_mask = od.compute_fundamental_matrix(s_keypoints[['x', 'y']].values,
                                                                           d_keypoints[['x', 'y']].values,
        transformation_matrix, fundam_mask = od.compute_fundamental_matrix(s_keypoints,
                                                                           d_keypoints,
                                                                           **kwargs)
        try:
            fundam_mask = fundam_mask.ravel()
        except:
            return
        # Convert the truncated RANSAC mask back into a full length mask
        mask[mask == True] = fundam_mask
        mask[mask] = fundam_mask
        self.fundamental_matrix = FundamentalMatrix(transformation_matrix,
                                                    all_source_keypoints[['x', 'y']],
                                                    all_destin_keypoints[['x', 'y']],
                                                    all_source_keypoints,
                                                    all_destin_keypoints,
                                                    mask=mask)

        # Subscribe the health watcher to the fundamental matrix observable
@@ -166,21 +166,21 @@ class Edge(dict, MutableMapping):

        matches, mask = self._clean(clean_keys)

        s_keypoints = self.source.keypoints.iloc[matches['source_idx'].values]
        d_keypoints = self.destination.keypoints.iloc[matches['destination_idx'].values]
        s_keypoints = self.source.get_keypoint_coordinates(matches['source_idx'])
        d_keypoints = self.destination.get_keypoint_coordinates(matches['destination_idx'])

        transformation_matrix, ransac_mask = od.compute_homography(s_keypoints[['x', 'y']].values,
                                                                   d_keypoints[['x', 'y']].values,
        transformation_matrix, ransac_mask = od.compute_homography(s_keypoints.values,
                                                                   d_keypoints.values,
                                                                   **kwargs)

        ransac_mask = ransac_mask.ravel()
        # Convert the truncated RANSAC mask back into a full length mask
        mask[mask == True] = ransac_mask
        mask[mask] = ransac_mask
        self.masks = ('ransac', mask)
        self.homography = Homography(transformation_matrix,
                                     s_keypoints[ransac_mask][['x', 'y']],
                                     d_keypoints[ransac_mask][['x', 'y']],
                                     mask=mask[mask == True].index)
                                     s_keypoints[ransac_mask],
                                     d_keypoints[ransac_mask],
                                     mask=mask[mask].index)

        # Finalize the array to get custom attrs to propagate
        self.homography.__array_finalize__(self.homography)
@@ -244,8 +244,8 @@ class Edge(dict, MutableMapping):
            s_idx = int(row['source_idx'])
            d_idx = int(row['destination_idx'])

            s_keypoint = self.source.keypoints.iloc[s_idx][['x', 'y']].values
            d_keypoint = self.destination.keypoints.iloc[d_idx][['x', 'y']].values
            s_keypoint = self.source.get_keypoint_coordinates(s_idx)
            d_keypoint = self.destination.get_keypoint_coordinates(d_idx)

            # Get the template and search window
            s_template = sp.clip_roi(s_img, s_keypoint, template_size)
@@ -294,13 +294,11 @@ class Edge(dict, MutableMapping):
        if not hasattr(self, 'matches'):
            raise AttributeError('This edge does not yet have any matches computed.')

        # Build up a composite mask from all of the user specified masks
        matches, mask = self._clean(clean_keys)

        domain = self.source.handle.raster_size

        # Massage the dataframe into the correct structure
        coords = self.source.keypoints[['x', 'y']]
        coords = self.source.get_keypoint_coordinates()
        merged = matches.merge(coords, left_on=['source_idx'], right_index=True)
        merged['strength'] = merged.apply(func, axis=1)

@@ -315,7 +313,7 @@ class Edge(dict, MutableMapping):
            self.suppression.suppress()

        if clean_keys:
            mask[mask == True] = self.suppression.mask
            mask[mask] = self.suppression.mask
        else:
            mask = self.suppression.mask
        self.masks = ('suppression', mask)
@@ -337,7 +335,7 @@ class Edge(dict, MutableMapping):
        matches, _ = self._clean(clean_keys)

        d_idx = matches['destination_idx'].values
        keypoints = self.destination.keypoints.iloc[d_idx][['x', 'y']].values
        keypoints = self.destination.get_keypoint_coordinates(d_idx)
        if len(keypoints) < 3:
            raise ValueError('Convex hull computation requires at least 3 measures.')

+2 −2
Original line number Diff line number Diff line
@@ -482,8 +482,8 @@ class CandidateGraph(nx.Graph):
                subpixel = True
                point_type = 3

            kp1 = self.node[source].keypoints
            kp2 = self.node[destination].keypoints
            kp1 = self.node[source].get_keypoints()
            kp2 = self.node[destination].get_keypoints()
            pt_idx = 0
            values = []
            for i, (idx, row) in enumerate(matches.iterrows()):
+30 −13
Original line number Diff line number Diff line
@@ -119,6 +119,22 @@ class Node(dict, MutableMapping):
        array = self.handle.read_array(band=band)
        return bytescale(array)

    def get_keypoints(self, index=None):
        """Return the keypoint dataframe for this node.

        Parameters
        ----------
        index : int or iterable, optional
                Positional indices used to subset the keypoints via
                ``DataFrame.iloc``.  When None, all keypoints are returned.

        Returns
        -------
        pd.DataFrame or None
            The (optionally subset) keypoints, or None when features have
            not yet been extracted for this node.
        """
        keypoints = getattr(self, '_keypoints', None)
        if keypoints is None or index is None:
            return keypoints
        try:
            return keypoints.iloc[index]
        except (TypeError, IndexError, KeyError):
            # Best-effort fallback: an unusable index yields the full frame
            # (preserves the original contract) without a bare except that
            # would also hide unrelated failures.
            return keypoints

    def get_keypoint_coordinates(self, index=None):
        """Return only the x/y pixel coordinates of the keypoints.

        Parameters
        ----------
        index : int or iterable, optional
                Positional indices forwarded to :meth:`get_keypoints`.

        Returns
        -------
        pd.DataFrame or None
            The 'x' and 'y' columns of the (optionally subset) keypoints,
            or None when no keypoints are available.
        """
        keypoints = self.get_keypoints(index=index)
        if keypoints is None:
            # No features extracted yet; preserve the None sentinel.
            return None
        try:
            return keypoints[['x', 'y']]
        except KeyError:
            # Coordinate columns missing: keep the original best-effort
            # None return, but no longer swallow arbitrary exceptions.
            return None

    def extract_features(self, array, **kwargs):
        """
        Extract features for the node
@@ -141,9 +157,9 @@ class Node(dict, MutableMapping):
            else:
                octave = (-128 | octave)
            keypoints[i] = kpt.pt[0], kpt.pt[1], kpt.response, kpt.size, kpt.angle, octave, layer  # y, x
        self.keypoints = pd.DataFrame(keypoints, columns=['x', 'y', 'response', 'size',
        self._keypoints = pd.DataFrame(keypoints, columns=['x', 'y', 'response', 'size',
                                                          'angle', 'octave', 'layer'])
        self._nkeypoints = len(self.keypoints)
        self._nkeypoints = len(self._keypoints)
        self.descriptors = descriptors.astype(np.float32)

    def load_features(self, in_path):
@@ -166,7 +182,7 @@ class Node(dict, MutableMapping):
        index = raw_kps['index']
        clean_kps = utils.remove_field_name(raw_kps, 'index')
        columns = clean_kps.dtype.names
        self.keypoints = pd.DataFrame(data=clean_kps, columns=columns, index=index)
        self._keypoints = pd.DataFrame(data=clean_kps, columns=columns, index=index)

        if isinstance(in_path, str):
            hdf = None
@@ -181,7 +197,8 @@ class Node(dict, MutableMapping):
        out_path : str or object
                   PATH to the hdf file or a HDFDataset object handle
        """
        if not hasattr(self, 'keypoints'):

        if not hasattr(self, '_keypoints'):
            warnings.warn('Node {} has not had features extracted.'.format(i))
            return

@@ -201,7 +218,7 @@ class Node(dict, MutableMapping):
                               compression=io_hdf.DEFAULT_COMPRESSION,
                               compression_opts=io_hdf.DEFAULT_COMPRESSION_VALUE)
            hdf.create_dataset('{}/keypoints'.format(self.image_name),
                               data=hdf.df_to_sarray(self.keypoints.reset_index()),
                               data=hdf.df_to_sarray(self._keypoints.reset_index()),
                               compression=io_hdf.DEFAULT_COMPRESSION,
                               compression_opts=io_hdf.DEFAULT_COMPRESSION_VALUE)
        except:
@@ -214,15 +231,15 @@ class Node(dict, MutableMapping):
            hdf = None

    def suppress(self, func=spf.response, **kwargs):
        if not hasattr(self, 'keypoints'):
        if not hasattr(self, '_keypoints'):
            raise AttributeError('No keypoints extracted for this node.')

        domain = self.handle.raster_size
        self.keypoints['strength'] = self.keypoints.apply(func, axis=1)
        self._keypoints['strength'] = self._keypoints.apply(func, axis=1)

        if not hasattr(self, 'suppression'):
            # Instantiate a suppression object and suppress keypoints
            self.suppression = od.SpatialSuppression(self.keypoints, domain, **kwargs)
            self.suppression = od.SpatialSuppression(self._keypoints, domain, **kwargs)
            self.suppression.suppress()
        else:
            # Update the suppression object attributes and process
@@ -243,14 +260,14 @@ class Node(dict, MutableMapping):
                The ratio of convex hull area to total area.
        """
        ideal_area = self.handle.pixel_area
        if not hasattr(self, 'keypoints'):
        if not hasattr(self, '_keypoints'):
            raise AttributeError('Keypoints must be extracted already, they have not been.')

        if clean_keys:
            mask = np.prod([self._mask_arrays[i] for i in clean_keys], axis=0, dtype=np.bool)
            keypoints = self.keypoints[mask]
            keypoints = self._keypoints[mask]

        keypoints = self.keypoints[['x', 'y']].values
        keypoints = self._keypoints[['x', 'y']].values

        ratio = convex_hull_ratio(keypoints, ideal_area)
        return ratio
@@ -276,10 +293,10 @@ class Node(dict, MutableMapping):
        mask : series
                    A boolean series to inflate back to the full match set
        """
        if not hasattr(self, 'keypoints'):
        if not hasattr(self, '_keypoints'):
            raise AttributeError('Keypoints have not been extracted for this node.')
        panel = self.masks
        mask = panel[clean_keys].all(axis=1)
        matches = self.keypoints[mask]
        matches = self._keypoints[mask]
        return matches, mask
+39 −2
Original line number Diff line number Diff line
import unittest
from unittest.mock import Mock

import pandas as pd
import numpy as np

from .. import edge
from .. import node

class TestEdge(unittest.TestCase):

    def setUp(self):
        self.edge = edge.Edge(source=0, destination=1)
    @classmethod
    def setUpClass(cls):
        source = Mock(node.Node)
        destination = Mock(node.Node)
        cls.edge = edge.Edge(source=source, destination=destination)

        # Define a matches dataframe
        source_image = np.zeros(20)
        destination_image = np.ones(20)
        source_idx = np.repeat(np.arange(10), 2)
        destination_idx = np.array([336,  78, 267, 467, 214, 212, 463, 241,  27, 154, 320, 108, 196,
                                    460,  67, 135,  80, 122, 106, 343])
        distance = np.array([263.43121338,  287.05050659,  231.03895569,  242.14459229,
                             140.07498169,  299.86331177,  332.05722046,  337.71438599,
                             94.9052124,  208.04806519,  102.21056366,  173.48774719,
                             102.19099426,  237.63206482,  240.93359375,  277.74627686,
                             217.82791138,  224.22979736,  260.3939209,  287.91143799])
        data = np.stack((source_image, source_idx, destination_image, destination_idx, distance), axis=-1)
        cls.edge.matches = pd.DataFrame(data, columns=['source_image', 'source_idx',
                                                 'destination_image', 'destination_idx',
                                                 'distance'])

    def test_properties(self):
        pass

    def test_masks(self):
        self.assertIsInstance(self.edge.masks, pd.DataFrame)

    def test_health(self):
        self.assertEqual(self.edge.health, 1.0)

    def test_compute_fundamental_matrix(self):
        with self.assertRaises(AttributeError):
            self.edge.compute_fundamental_matrix()
        self.assertTrue(False)

+3 −3
Original line number Diff line number Diff line
@@ -30,7 +30,7 @@ class TestNode(unittest.TestCase):
    def test_extract_features(self):
        image = self.node.get_array()
        self.node.extract_features(image, extractor_parameters={'nfeatures':10})
        self.assertEquals(len(self.node.keypoints), 10)
        self.assertEquals(len(self.node.get_keypoints()), 10)
        self.assertEquals(len(self.node.descriptors), 10)
        self.assertIsInstance(self.node.descriptors[0], np.ndarray)
        self.assertEqual(10, self.node.nkeypoints)
@@ -47,9 +47,9 @@ class TestNode(unittest.TestCase):
        image = self.node.get_array()
        self.node.extract_features(image, method='sift', extractor_parameters={'nfeatures':10})
        self.node.save_features('node_test.hdf')
        kps = self.node.keypoints.copy()
        kps = self.node.get_keypoints().copy()
        descriptors = self.node.descriptors.copy()
        self.node.load_features('node_test.hdf')
        self.assertTrue((kps == self.node.keypoints).all().all())
        self.assertTrue((kps == self.node.get_keypoints()).all().all())

        os.remove('node_test.hdf')
Loading