Unverified Commit ad69077a authored by jlaura's avatar jlaura Committed by GitHub
Browse files

Removes unused acn_* and updates README (#565)

* Removes unused acn_* and updates README

* removed pvl

* More dependency updates
parent 7c188838
Loading
Loading
Loading
Loading
+5 −0
Original line number Diff line number Diff line
@@ -141,6 +141,11 @@ are loaded into the database sequentially and then a spatial overlay operation i
to determine how individual images overlap with one another (using the footprints
generated from the a priori sensor pointing.)

This method performs the following actions:
- Load each image, as a row, into the Images table of the database. This includes attempting to extract a footprint from the image. The footprint can be read from an ISIS cube if footprintinit has been run. Alternatively, experimental support exists for Community Sensor Model sensors developed by USGS.
- Use the database to compute the overlapping geometries between each of the images. For large data sets this can be a costly, one time operation. Limiting the number of geometries in image footprints can significantly improve performance. For each overlap, a row is added to the Overlay table. This table tracks the overlapping geometries and the images that intersect those geometries.
- Return a NetworkCandidateGraph where each node represents an image and each edge represents a spatial overlap between said images.

### Operations on the NCG: Database Rows
After we have an NCG, we want to perform operations on the graph or on database
rows associated with the graph (e.g., the Points, Measures, or Image Overlaps).
+2 −4
Original line number Diff line number Diff line
@@ -631,11 +631,9 @@ class NetworkNode(Node):
              The URI to a service that can create an ISD to instantiate
              a sensor.
        """
        # Create the camera entry
        import pvl
        import requests
        import json
        raise NotImplementedError

        # TODO: This should pass the straight metadata and not mess with mundging it.
        label = pvl.dumps(self.geodata.metadata).decode()
        response = requests.post(url, json={'label':label})
        response = response.json()
+0 −30
Original line number Diff line number Diff line
from shapely.geometry import MultiPoint
from plio.io.io_gdal import GeoDataset
import numpy as np
import matplotlib.pyplot as plt

import ctypes
import enum
import glob
import json
import os
import os.path
import socket
from ctypes.util import find_library

import pandas as pd
import scipy
from sqlalchemy import (Boolean, Column, Float, ForeignKey, Integer,
                        LargeBinary, String, UniqueConstraint, create_engine,
                        event, orm, pool)
from sqlalchemy.ext.declarative import declarative_base

import geopandas as gpd
import plio
import pvl
import pyproj
import pysis
import cv2

from gdal import ogr

import geoalchemy2
from geoalchemy2 import Geometry, WKTElement
from geoalchemy2.shape import to_shape
from geoalchemy2 import functions

from knoten import csm

from plio.io.io_controlnetwork import from_isis, to_isis

from shapely import wkt
from shapely.geometry.multipolygon import MultiPolygon
from shapely.geometry import Point

from plurmy import Slurm

from autocnet.matcher.subpixel import check_match_func
from autocnet.io.db.model import Images, Points, Measures, JsonEncoder
from autocnet.cg.cg import distribute_points_in_geom, xy_in_polygon
from autocnet.io.db.connection import new_connection
from autocnet.spatial import isis
from autocnet.transformation.spatial import reproject, oc2og
from autocnet.matcher.cpu_extractor import extract_most_interesting

bin/acn_load_images

deleted100644 → 0
+0 −130
Original line number Diff line number Diff line
#!/usr/bin/env python

import json
import os
os.environ['PROJ_LIB'] = '/home/jlaura/anaconda3/envs/autocnet/share/proj'
import sys
import time
import warnings

import csmapi
from knoten.csm import generate_latlon_footprint, generate_boundary
from plio.io.io_gdal import GeoDataset
from plio.io.isis_serial_number import generate_serial_number
import pvl
from redis import StrictRedis
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import shapely
import yaml

from autocnet.io.db.redis_queue import pop_computetime_push
from autocnet.io.db.model import Images, Cameras
from autocnet.utils import utils
from autocnet import Session

def requests_retry_session(
    retries=3,
    backoff_factor=0.3,
    status_forcelist=(500, 502, 504),
    session=None,
):
    """Return a requests.Session that retries transient HTTP failures.

    Parameters
    ----------
    retries : int
        Maximum number of total, read, and connect retry attempts.
    backoff_factor : float
        Multiplier for the exponential backoff between attempts.
    status_forcelist : tuple of int
        HTTP status codes that force a retry.
    session : requests.Session, optional
        Existing session to configure; a fresh one is created if omitted.
    """
    session = session or requests.Session()
    retry_policy = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    http_adapter = HTTPAdapter(max_retries=retry_policy)
    # Install the same retrying adapter for both schemes.
    for scheme in ('http://', 'https://'):
        session.mount(scheme, http_adapter)
    return session

# Load the cluster configuration. The path comes from the 'autocnet_config'
# environment variable and the file is parsed as YAML into a module-level dict.
try:
    print('Using config: ', os.environ['autocnet_config'])
    with open(os.environ['autocnet_config'], 'r') as f:
        config = yaml.safe_load(f)
except KeyError:
    # The environment variable itself is missing.
    print("The 'autocnet_config' environment variable is not set.")
    sys.exit(1)
except (OSError, yaml.YAMLError) as e:
    # The variable is set, but the file is unreadable or not valid YAML.
    # The original bare `except:` reported this case with the misleading
    # "variable is not set" message.
    print(f"Unable to load config file: {e}")
    sys.exit(1)

def create_footprint(config, geodata, camera):
    """Project an image boundary through its camera onto a DEM and return
    the resulting 2D lat/lon footprint geometry."""
    # Reverse the raster dimensions so the boundary generator receives
    # them in the order it expects (yx to xy, per the original note).
    boundary_pts = generate_boundary(geodata.raster_size[::-1])
    elevation_model = GeoDataset(config['spatial']['dem'])
    footprint = generate_latlon_footprint(camera, boundary_pts, dem=elevation_model)
    # Drop the Z dimension so the geometry is strictly 2D.
    footprint.FlattenTo2D()
    return footprint

def create_camera(config, geodata, imagepath):
    """Create a CSM camera model and its database row for an image.

    Posts the image label to the pfeffernusse ISD service
    (config['pfeffernusse']['url']), writes the returned ISD as a .json
    sidecar next to the image, and constructs a UsgsAstroPluginCSM
    sensor model from it.

    Returns
    -------
    tuple
        (Cameras row, csmapi sensor model), or (None, None) when the
        service response does not include a 'name_model'.
    """
    # Create the camera entry
    label = pvl.dumps(geodata.metadata).decode()
    url = config['pfeffernusse']['url']
    response = requests_retry_session().post(url, json={'label':label})
    response = response.json()
    model_name = response.get('name_model', None)
    if model_name is None:
        return (None, None)
    # Persist the ISD next to the image so the CSM plugin can find it.
    isdpath = os.path.splitext(imagepath)[0] + '.json'
    with open(isdpath, 'w') as f:
        json.dump(response, f)
    # NOTE(review): the Isd is constructed from the image path, not isdpath;
    # presumably the usgscsm plugin resolves the .json sidecar from the image
    # filename — confirm, otherwise this should be csmapi.Isd(isdpath).
    isd = csmapi.Isd(imagepath)
    plugin = csmapi.Plugin.findPlugin('UsgsAstroPluginCSM')
    camera = plugin.constructModelFromISD(isd, model_name)
    serialized_camera = camera.getModelState()

    cam = Cameras(camera=serialized_camera)
    return cam, camera

def main(msg, config):
    """Load the images named in ``msg['imagepaths']`` into the database.

    For each path not already present (keyed by ISIS serial number), a
    CSM camera and a footprint are created and an Images row is queued;
    all rows are committed with a single bulk add.
    """
    # Fetch the serials already in the database so re-runs skip images
    # that were loaded previously.
    session = Session()
    serials = [s[0] for s in session.query(Images.serial).all()]
    session.close()

    images = []
    for path in msg['imagepaths']:
        try:
            serial = generate_serial_number(path)
        except Exception as e:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # propagate; include the cause in the warning.
            warnings.warn(f'Unable to generate serial for {path}: {e}')
            continue
        if serial in serials:
            print(f'Image {path} already in database.')
            continue
        print(f'Processing: {path}')
        try:
            geodata = GeoDataset(path)
            dbcam, cam = create_camera(config, geodata, path)
            if dbcam is None:
                warnings.warn(f'Failed to add {path}')
                continue
            fp = create_footprint(config, geodata, cam)
            # The schema stores MultiPolygons; promote a bare Polygon.
            if isinstance(fp, shapely.geometry.Polygon):
                fp = shapely.geometry.MultiPolygon([fp])
            # Reuse the serial computed above rather than re-deriving it.
            i = Images(name=geodata.file_name,
                       path=path,
                       geom=fp,
                       cameras=dbcam,
                       serial=serial)
            images.append(i)
        except Exception as e:
            warnings.warn(f'Failed to add {path}: {e}')

    Images.bulkadd(images)

if __name__ == '__main__':
    # Connect to the Redis instance that holds the cluster job queues.
    conf = config['redis']
    queue = StrictRedis(host=conf['host'], port=conf['port'], db=0)

    # Pop one job message from the processing queue and push it onto the
    # working queue so in-flight work is tracked.
    msg = pop_computetime_push(queue,
                               conf['processing_queue'],
                               conf['working_queue'])
    if msg is None:
        warnings.warn('Expected to process a cluster job, but the message queue is empty.')
        sys.exit()

    main(msg, config)

bin/acn_propagate

deleted100755 → 0
+0 −65
Original line number Diff line number Diff line
#!/usr/bin/env python

import copy
import os
import json
import sys
import warnings

from redis import StrictRedis
import yaml

from autocnet.io.db.redis_queue import pop_computetime_push, finalize
from autocnet.matcher import cross_instrument_matcher as cim
from autocnet.io.db.model import Points, Measures
from autocnet import Session

# Load the cluster configuration from the path named by the
# 'autocnet_config' environment variable, parsed as YAML.
try:
    with open(os.environ['autocnet_config'], 'r') as f:
        config = yaml.safe_load(f)
except KeyError:
    # The environment variable itself is missing.
    print("The 'autocnet_config' environment variable is not set.")
    sys.exit(1)
except (OSError, yaml.YAMLError) as e:
    # The variable is set, but the file is unreadable or not valid YAML.
    # The original bare `except:` reported this case with the misleading
    # "variable is not set" message.
    print(f"Unable to load config file: {e}")
    sys.exit(1)

def main(msg, config):
    """Create one control point (with its measures) from a propagation job.

    The queue message supplies the keyword arguments for
    cross_instrument_matcher.propagate_point; scheduler-only keys are
    stripped first. The resulting point and measures are bulk-added to
    the database.
    """
    print("Adding points using params:")
    print(json.dumps(msg, indent=2))

    # Remove scheduler bookkeeping keys that propagate_point does not accept.
    for scheduler_key in ('walltime', 'max_time'):
        msg.pop(scheduler_key, None)
    point_measures = cim.propagate_point(**msg)

    print("Point Measures:")
    print(point_measures)

    # The first measure carries the ground point used for both the
    # a priori and adjusted positions.
    ground = point_measures[0]["point_ground"]
    p = Points()
    p.pointtype = 3
    p.apriori = ground
    p.adjusted = ground

    for measure in point_measures:
        new_measure = Measures(line=float(measure['line']),
                               sample=float(measure['sample']),
                               aprioriline=float(measure['line']),
                               apriorisample=float(measure['sample']),
                               imageid=int(measure['imageid']),
                               serial=measure['serial'],
                               measuretype=3)
        p.measures.append(new_measure)

    print('Adding {} measures to the database.'.format(len(point_measures)))
    Points.bulkadd([p])

if __name__ == '__main__':
    # Connect to the Redis instance that holds the cluster job queues.
    conf = config['redis']
    queue = StrictRedis(host=conf['host'], port=conf['port'], db=0)

    # Pop one job message from the processing queue and push it onto the
    # working queue so in-flight work is tracked.
    msg = pop_computetime_push(queue,
                               conf['processing_queue'],
                               conf['working_queue'])
    if msg is None:
        warnings.warn('Expected to process a cluster job, but the message queue is empty.')
        sys.exit()

    main(msg, config)
Loading