Commit 1a1b866e authored by Nelson, Gavin (Contractor) Scott's avatar Nelson, Gavin (Contractor) Scott
Browse files

Merge branch 'logging' into 'dev'

finished up converting autocnet modules to use logging

Closes #602

See merge request astrogeology/autocnet!636
parents d7aefd46 01005da6
Loading
Loading
Loading
Loading
+5 −2
Original line number Diff line number Diff line
@@ -7,6 +7,7 @@ import numpy as np
import networkx as nx
import geopandas as gpd
import ogr
import logging

from skimage import transform as tf
from scipy.spatial import Voronoi, Delaunay, ConvexHull
@@ -20,6 +21,8 @@ from autocnet.cg import cg

from shapely.ops import cascaded_union, polygonize

# set up the logger file
log = logging.getLogger(__name__)

def two_point_extrapolate(x, xs, ys):
    """
@@ -553,7 +556,7 @@ def distribute_points_in_geom(geom, method="classic",
        else:
            valid = point_distribution_func(geom, nspts, ewpts, Session=Session, **kwargs)
    else:
        print('WTF Willy')
        log.warning('WTF Willy')
    return np.array(valid)


@@ -652,7 +655,7 @@ def rasterize_polygon(shape, vertices, dtype=bool):
    for k in range(vertices.shape[0]):
        fill = np.all([fill, check(vertices[k-1], vertices[k], base_array)], axis=0)

    print(fill.any())
    log.info(fill.any())
    # Set all values inside polygon to one
    base_array[fill] = 1
    return base_array
+5 −1
Original line number Diff line number Diff line
@@ -12,6 +12,7 @@ from skimage.feature import blob_log, blob_doh
from math import sqrt, atan2, pi
from hoggorm.mat_corr_coeff import RVcoeff
import math
import logging

import scipy
from scipy.spatial import cKDTree
@@ -37,6 +38,9 @@ from autocnet.utils.utils import bytescale
from autocnet.matcher.cpu_extractor import extract_features
from autocnet import cg

# set up the logger file
log = logging.getLogger(__name__)

def image_diff(arr1, arr2):
    """
    Diff two images but accounts for null pixels. Intended to be used with change 
@@ -579,7 +583,7 @@ def rv_detector(im1, im2, search_size, pattern_size=None, threshold=.999):
        pattern_size = search_size

    if search_size < pattern_size:
        print("Pattern size must be <= search size.  Setting pattern_size=search_size")
        log.warning("Pattern size must be <= search size.  Setting pattern_size=search_size")
        search_size = pattern_size

    rv = np.empty(im1.shape)
+11 −23
Original line number Diff line number Diff line
@@ -183,7 +183,7 @@ def manage_messages(args, queue):
                log.warning('Expected to process a cluster job, but the message queue is empty.')
                return
            elif args['queue'] == True:
                print(f'Completed processing from queue: {queue}.')
                log.info(f'Completed processing from queue: {queue}.')
                return

        # The key to remove from the working queue is the message. Essentially, find this element
@@ -193,21 +193,9 @@ def manage_messages(args, queue):
        #Convert the message from binary into a dict
        msgdict = json.loads(msg, object_hook=object_hook)

        
        # should replace this with some logging logic later
        # rather than redirecting std out
        stdout = StringIO()
        with redirect_stdout(stdout):
        # Apply the algorithm
        response = process(msgdict)
            # Should go to a logger someday! (today is that day!)
            print(response)
            
        out = stdout.getvalue()
        # print to get everything on the logs in the directory
        print(out)
        sys.stdout.flush()
        stdout.flush()
        log.info(response)

        #serializedDict = json.loads(msg)
        #results  = msgdict['results'] if msgdict['results'] else [{"status" : "success"}]
+4 −4
Original line number Diff line number Diff line
@@ -1854,7 +1854,7 @@ class NetworkCandidateGraph(CandidateGraph):
        try:
            processing_queue = getattr(self, redis_queue)
        except AttributeError:
            print(f'Unable to find attribute {redis_queue} on this object. Valid queue names are: "processing_queue" and "working_queue".')
            log.exception(f'Unable to find attribute {redis_queue} on this object. Valid queue names are: "processing_queue" and "working_queue".')

        env = self.config['env']
        condaenv = env['conda']
@@ -2075,7 +2075,7 @@ class NetworkCandidateGraph(CandidateGraph):
        for cnt, f in enumerate(filelist):
            # Create the nodes in the graph. Really, this is creating the
            # images in the DB
            print('loading {} of {}'.format(cnt+1, total))
            log.info('loading {} of {}'.format(cnt+1, total))
            self.add_image(f)

        self.from_database()
@@ -2560,7 +2560,7 @@ class NetworkCandidateGraph(CandidateGraph):

            # TO DO: RETURN ALL EDGES
            if len(df) == 0:
                print(f'Overlap {oid} is empty')
                log.info(f'Overlap {oid} is empty')
                return []

            # create graph edges
@@ -2572,7 +2572,7 @@ class NetworkCandidateGraph(CandidateGraph):
            fully_connected_number_of_edges = scipy.special.comb(graph.number_of_nodes(),2)
            all_edges = list(combinations(graph.nodes, 2))
            if graph.number_of_edges() == fully_connected_number_of_edges:
                print(f'Overlap {oid} is fully connected')
                log.info(f'Overlap {oid} is fully connected')
                return []

            # return missing image id pairs
+15 −15
Original line number Diff line number Diff line
@@ -73,16 +73,16 @@ def generate_ground_points(Session, ground_mosaic, nspts_func=lambda x: int(roun
    newsamples = []

    # throw out points not intersecting the ground reference images
    print('points to lay down: ', len(coords))
    log.info('points to lay down: %s', len(coords))
    for i, coord in enumerate(coords):
        # res = ground_session.execute(formated_sql)
        p = Point(*coord)
        print(f'point {i}')
        log.info(f'point {i}')


        linessamples = isis.point_info(ground_mosaic.file_name, p.x, p.y, 'ground')
        if linessamples is None:
            print('unable to find point in ground image')
            log.warning('unable to find point in ground image')
            continue
        line = linessamples.get('Line')
        sample = linessamples.get('Sample')
@@ -108,7 +108,7 @@ def generate_ground_points(Session, ground_mosaic, nspts_func=lambda x: int(roun
                  newpoint.get('PlanetocentricLatitude'))

        if not (xy_in_polygon(p.x, p.y, fp_poly)):
                print('Interesting point not in mosaic area, ignore')
                log.warning('Interesting point not in mosaic area, ignore')
                continue

        old_coord_list.append(op)
@@ -251,9 +251,9 @@ def propagate_point(Session,
                continue

            try:
                print(f'prop point: base_image: {base_image}')
                print(f'prop point: dest_image: {dest_image}')
                print(f'prop point: (sx, sy): ({sx}, {sy})')
                log.info(f'prop point: base_image: {base_image}')
                log.info(f'prop point: dest_image: {dest_image}')
                log.info(f'prop point: (sx, sy): ({sx}, {sy})')
                x,y, dist, metrics, corrmap = geom_match_simple(base_image, dest_image, sx, sy, 16, 16, \
                        match_func = match_func, \
                        match_kwargs=match_kwargs, \
@@ -280,14 +280,14 @@ def propagate_point(Session,
        return new_measures

    if verbose:
        print("match_results final length: ", len(match_results))
        print("best_results length: ", len(best_results))
        print("Full results: ", best_results)
        print("Winning CORRs: ", best_results[:,3], "Themis Pixel shifts: ", best_results[:,4])
        print("Themis Images: ", best_results[:,6], "CTX images:", best_results[:,7])
        print("Themis Sample: ", sx, "CTX Samples: ", best_results[:,1])
        print("Themis Line: ", sy, "CTX Lines: ", best_results[:,2])
        print('\n')
        log.info("match_results final length: %s", len(match_results))
        log.info("best_results length: %s", len(best_results))
        log.info("Full results: %s", best_results)
        log.info("Winning CORRs: %s Themis Pixel shifts: %s", best_results[:,3], best_results[:,4])
        log.info("Themis Images: %s CTX images: %s", best_results[:,6], best_results[:,7])
        log.info("Themis Sample: %s CTX Samples: %s", sx, best_results[:,1])
        log.info("Themis Line: %s CTX Lines: %s", sy, best_results[:,2])
        log.info('\n')

    # if the single best results metric (returned by geom_matcher) is None
    if len(best_results[:,3])==1 and best_results[:,3][0] is None:
Loading