Commit 2ca4991b authored by Adoram-Kershner, Lauren
Browse files

Merge branch 'affine_update' into 'main'

Affine update

Closes #602

See merge request astrogeology/autocnet!658
parents f5e14f24 aeabb5a8
Loading
Loading
Loading
Loading
+9 −1
Original line number Diff line number Diff line
@@ -34,12 +34,20 @@ release.
-->
## [Unreleased]

## [0.7.1]()
### Added
- [`pool_pre_ping`](https://docs.sqlalchemy.org/en/14/core/pooling.html#disconnect-handling-pessimistic) to the sqlalchemy engine connection to handle instances where hundreds of connections are simultaneously connecting to the database.
- verbose option to the smart subpixel matcher that will visualize the reference and moving ROIs in order to better support single point visualization.

### Changed
- Estimation of the affine transformation no longer needs to use points completely within the destination (moving) image. Negative values are still valid for affine estimation and the sensor model is not constrained to within the image.

### Fixed
- Fixed connection issues where too many connections to AWS RDS were causing connection failures by adding an exponential sleep over five retries.
- Fixed a missing import in place points in overlap that caused a failure when attempting to raise a warning.

### Removed
- Ciratefi matcher from subpixel.py as the matcher is seldom used and better alternatives for scale and rotation invariance exist in the library.

## [0.7.0]()

### Added
+5 −3
Original line number Diff line number Diff line
@@ -7,6 +7,7 @@ import numpy as np
import networkx as nx
import geopandas as gpd
import ogr
import logging

from skimage import transform as tf
from scipy.spatial import Voronoi, Delaunay, ConvexHull
@@ -20,6 +21,8 @@ from autocnet.cg import cg

from shapely.ops import cascaded_union, polygonize

# set up the logger file
log = logging.getLogger(__name__)

def two_point_extrapolate(x, xs, ys):
    """
@@ -321,7 +324,6 @@ def generate_random(number, polygon):
    while len(points) < number and i < 1000:
        pnt = Point(random.uniform(minx, maxx), random.uniform(miny, maxy))
        if polygon.contains(pnt):
            print(pnt.x, pnt.y)
            points.append([pnt.x, pnt.y])
        i += 1
    return np.asarray(points)
@@ -553,7 +555,7 @@ def distribute_points_in_geom(geom, method="classic",
        else:
            valid = point_distribution_func(geom, nspts, ewpts, Session=Session, **kwargs)
    else:
        print('WTF Willy')
        log.warning('WTF Willy')
    return np.array(valid)


@@ -652,7 +654,7 @@ def rasterize_polygon(shape, vertices, dtype=bool):
    for k in range(vertices.shape[0]):
        fill = np.all([fill, check(vertices[k-1], vertices[k], base_array)], axis=0)

    print(fill.any())
    log.info(fill.any())
    # Set all values inside polygon to one
    base_array[fill] = 1
    return base_array
+5 −1
Original line number Diff line number Diff line
@@ -12,6 +12,7 @@ from skimage.feature import blob_log, blob_doh
from math import sqrt, atan2, pi
from hoggorm.mat_corr_coeff import RVcoeff
import math
import logging

import scipy
from scipy.spatial import cKDTree
@@ -37,6 +38,9 @@ from autocnet.utils.utils import bytescale
from autocnet.matcher.cpu_extractor import extract_features
from autocnet import cg

# set up the logger file
log = logging.getLogger(__name__)

def image_diff(arr1, arr2):
    """
    Diff two images but accounts for null pixels. Intended to be used with change 
@@ -578,7 +582,7 @@ def rv_detector(im1, im2, search_size, pattern_size=None, threshold=.999):
        pattern_size = search_size

    if search_size < pattern_size:
        print("Pattern size must be <= search size.  Setting pattern_size=search_size")
        log.warning("Pattern size must be <= search size.  Setting pattern_size=search_size")
        search_size = pattern_size

    rv = np.empty(im1.shape)
+11 −23
Original line number Diff line number Diff line
@@ -183,7 +183,7 @@ def manage_messages(args, queue):
                log.warning('Expected to process a cluster job, but the message queue is empty.')
                return
            elif args['queue'] == True:
                print(f'Completed processing from queue: {queue}.')
                log.info(f'Completed processing from queue: {queue}.')
                return

        # The key to remove from the working queue is the message. Essentially, find this element
@@ -193,21 +193,9 @@ def manage_messages(args, queue):
        #Convert the message from binary into a dict
        msgdict = json.loads(msg, object_hook=object_hook)

        
        # should replace this with some logging logic later
        # rather than redirecting std out
        stdout = StringIO()
        with redirect_stdout(stdout):
        # Apply the algorithm
        response = process(msgdict)
            # Should go to a logger someday! (today is that day!)
            print(response)
            
        out = stdout.getvalue()
        # print to get everything on the logs in the directory
        print(out)
        sys.stdout.flush()
        stdout.flush()
        log.info(response)

        #serializedDict = json.loads(msg)
        #results  = msgdict['results'] if msgdict['results'] else [{"status" : "success"}]
+41 −1
Original line number Diff line number Diff line
@@ -425,6 +425,46 @@ class Edge(dict, MutableMapping):
        mask[mask] = hmask.ravel()
        self.masks['homography'] = mask

    def _prep_subpixel(self, nmatches, nstrengths=2):
        """
        Setup the data strutures to return for subpixel matching.

        Parameters
        ----------
        nmatches : int
                    The number of pixels to be subpixel matches

        nstrengths : int
                        The number of 'strength' values to be returned
                        by the subpixel matching method.

        Returns
        -------
        shifts_x : ndarray
                (nmatches, 1) to store the x_shift parameter

        shifts_y : ndarray
                (nmatches, 1) to store the y_shift parameter

        strengths : ndarray
                    (nmatches, nstrengths) to store the strengths for each point

        new_x : ndarray
                (nmatches, 1) to store the updated x coordinates

        new_y : ndarray
                (nmatches, 1) to store the updated y coordinates
        """
        # Setup to store output to append to dataframes
        shifts_x = np.zeros(nmatches)
        shifts_y = np.zeros(nmatches)
        strengths = np.zeros((nmatches, nstrengths))

        new_x = np.empty(nmatches)
        new_y = np.empty(nmatches)

        return shifts_x, shifts_y, strengths, new_x, new_y

    def subpixel_register(self, method='phase', clean_keys=[],
                          template_size=251, search_size=251, **kwargs):
        """
@@ -469,7 +509,7 @@ class Edge(dict, MutableMapping):
        elif method == 'template':
            func = sp.subpixel_template
            nstrengths = 1
        shifts_x, shifts_y, strengths, new_x, new_y = sp._prep_subpixel(len(matches), nstrengths)
        shifts_x, shifts_y, strengths, new_x, new_y = self._prep_subpixel(len(matches), nstrengths)

        # for each edge, calculate this for each keypoint pair
        for i, (idx, row) in enumerate(matches.iterrows()):
Loading