# NOTE(review): this section was pasted from a GitHub diff view; the "new"
# side of each interleaved old/new pair has been kept and reformatted.

# %% Cell 1: imports
import os
import sys
from functools import singledispatch
import warnings
import math
from collections import defaultdict

import pandas as pd
import numpy as np
import pyproj

# sys.path.insert(0, "/home/tthatcher/Desktop/Projects/Plio/plio")
from plio.examples import get_path
from plio.io.io_bae import read_gpf, read_ipf
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn

# %% Cell 2: Socet Set -> ISIS conversion helpers


def read_atf(atf_file):
    """Read a Socet Set .atf project file and collect the referenced files.

    Parameters
    ----------
    atf_file : str
        Path to the .atf project file.

    Returns
    -------
    dict
        'IMAGE_IPF' : list of .ipf file names
        'IMAGE_SUP' : list of .sup file names
        'GP_FILE'   : the .gpf file name
        'PROJECT'   : the .prj file name
        'PATH'      : directory containing the .atf file
    """
    # Extensions of the referenced files we care about.
    wanted_exts = ['.prj', '.sup', '.ipf', '.gpf']
    files = defaultdict(list)

    with open(atf_file) as f:
        for line in f:
            ext = os.path.splitext(line)[-1].strip()
            if ext in wanted_exts:
                if ext == '.prj':
                    # The .prj entry carries a Windows-style path; keep only
                    # the file name after the last backslash.
                    files[ext].append(line.strip().split(' ')[1].split('\\')[-1])
                else:
                    # Other entries are "<KEYWORD> <name>"; keep the name.
                    files[ext].append(line.strip().split(' ')[-1])
            else:
                # Keep every other line too, keyed by its (possibly empty) ext.
                files[ext].append(line)

    # The directory of the .atf file anchors all relative file names.
    files['basepath'] = os.path.dirname(os.path.abspath(atf_file))

    return {'IMAGE_IPF': files['.ipf'],
            'IMAGE_SUP': files['.sup'],
            'GP_FILE': files['.gpf'][0],
            'PROJECT': files['.prj'][0],
            'PATH': files['basepath']}


def line_sample_size(record, path):
    """Convert Socet center-origin l./s. to ISIS corner-origin line/sample.

    Reads the image dimensions from the record's .sup file and returns
    (sample, line, image_index).
    """
    with open(os.path.join(path, record['ipf_file'] + '.sup')) as f:
        for i, line in enumerate(f):
            if i == 2:
                # Third line holds the image path; keep the bare file stem.
                img_index = line.split('\\')[-1].strip().split('.')[0]
            if i == 3:
                line_size = line.split(' ')[-1].strip()
                assert int(line_size) > 0, "Line number {} from {} is a negative number: Invalid Data".format(line_size, record['ipf_file'])
            if i == 4:
                sample_size = line.split(' ')[-1].strip()
                assert int(sample_size) > 0, "Sample number {} from {} is a negative number: Invalid Data".format(sample_size, record['ipf_file'])
                break

    # Shift from an image-center origin to ISIS's 1-based corner origin.
    line_size = int(line_size) / 2.0 + record['l.'] + 1
    sample_size = int(sample_size) / 2.0 + record['s.'] + 1
    return sample_size, line_size, img_index


def known(record):
    """Map the Socet 'known' flag to the ISIS point-type keyword."""
    if record['known'] == 0:
        return 'Free'
    elif record['known'] in (1, 2, 3):
        return 'Constrained'


def to_360(num):
    """Convert a +/-180 longitude system value to the 0-360 system."""
    return num % 360


def oc2og(dlat, dMajorRadius, dMinorRadius):
    """Convert an ocentric latitude (degrees) to an ographic latitude."""
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(((dMajorRadius / dMinorRadius) ** 2) * math.tan(dlat))
        dlat = math.degrees(dlat)
    except Exception:
        # Best-effort behavior preserved: report and return the input as-is.
        print("Error in oc2og conversion")
    return dlat


def og2oc(dlat, dMajorRadius, dMinorRadius):
    """Convert an ographic latitude (degrees) to an ocentric latitude."""
    try:
        dlat = math.radians(dlat)
        dlat = math.atan(math.tan(dlat) / ((dMajorRadius / dMinorRadius) ** 2))
        dlat = math.degrees(dlat)
    except Exception:
        # Best-effort behavior preserved: report and return the input as-is.
        print("Error in og2oc conversion")
    return dlat


def get_axis(file):
    """Parse a .prj file and return (eRadius, pRadius) in meters.

    A_EARTH is the equatorial radius; E_EARTH is presumably the flattening,
    since the polar radius is computed as a * (1 - E_EARTH) — TODO confirm.
    """
    values = defaultdict(list)
    with open(file) as f:
        for line in f:
            tokens = line.strip().split(' ')
            values[tokens[0]].append(tokens[-1])

    eRadius = float(values['A_EARTH'][0])
    pRadius = eRadius * (1 - float(values['E_EARTH'][0]))
    return eRadius, pRadius


def lat_ISIS_coord(record, semi_major, semi_minor):
    """Convert the lat_Y_North column to an ISIS (ocentric, 0-360) latitude."""
    return to_360(og2oc(record['lat_Y_North'], semi_major, semi_minor))


def lon_ISIS_coord(record, semi_major, semi_minor):
    """Convert the long_X_East column to an ISIS (ocentric, 0-360) longitude."""
    return to_360(og2oc(record['long_X_East'], semi_major, semi_minor))


def body_fix(record, semi_major, semi_minor, inverse=False):
    """Transform between lon/lat/height and body-fixed XYZ via pyproj.

    Parameters
    ----------
    record : ndarray
        (n, 3) where columns are x, y, height or lon, lat, alt
    semi_major, semi_minor : float
        Body radii in meters.
    inverse : bool
        If True, convert body-fixed XYZ back to lon/lat/height.
    """
    ecef = pyproj.Proj(proj='geocent', a=semi_major, b=semi_minor)
    lla = pyproj.Proj(proj='latlon', a=semi_major, b=semi_minor)

    if inverse:
        lon, lat, height = pyproj.transform(ecef, lla, record[0], record[1], record[2])
        return lon, lat, height
    y, x, z = pyproj.transform(lla, ecef, record[0], record[1], record[2])
    return y, x, z


def ignore_toggle(record):
    """A Socet 'stat' of 0 marks the point inactive -> ignore it in ISIS."""
    return record['stat'] == 0
# TODO: Does ISIS cnet need a covariance matrix for sigmas?
#       Even with a static matrix of 1,1,1,1?
def compute_sigma_covariance_matrix(lat, lon, rad, latsigma, lonsigma, radsigma, semimajor_axis):
    """
    Given geospatial coordinates, desired accuracy sigmas, and an equatorial
    radius, compute a 2x3 sigma covariance matrix.

    Parameters
    ----------
    lat : float
          A point's latitude in degrees
    lon : float
          A point's longitude in degrees
    rad : float
          The radius (z-value) of the point in meters
    latsigma : float
               The desired latitude accuracy in meters (Default 10.0)
    lonsigma : float
               The desired longitude accuracy in meters (Default 10.0)
    radsigma : float
               The desired radius accuracy in meters (Default: 15.0)
    semimajor_axis : float
                     The semi-major or equatorial radius in meters
                     (Default: 1737400.0 - Moon)

    Returns
    -------
    rectcov : ndarray
              (2,3) covariance matrix
    """
    lat = math.radians(lat)
    lon = math.radians(lon)

    # SetSphericalSigmasDistance: angular sigmas from metric accuracies.
    scaled_lat_sigma = latsigma / semimajor_axis
    # This is specific to each lon.
    scaled_lon_sigma = lonsigma * math.cos(lat) / semimajor_axis

    # SetSphericalSigmas: diagonal covariance in (lat, lon, radius) space.
    cov = np.eye(3, 3)
    cov[0, 0] = scaled_lat_sigma ** 2
    cov[1, 1] = scaled_lon_sigma ** 2
    cov[2, 2] = radsigma ** 2

    # Approximate the Jacobian of the spherical -> rectangular transform.
    j = np.zeros((3, 3))
    cosphi = math.cos(lat)
    sinphi = math.sin(lat)
    coslambda = math.cos(lon)
    sinlambda = math.sin(lon)
    rcosphi = rad * cosphi
    rsinphi = rad * sinphi
    j[0, 0] = -rsinphi * coslambda
    j[0, 1] = -rcosphi * sinlambda
    j[0, 2] = cosphi * coslambda
    j[1, 0] = -rsinphi * sinlambda
    j[1, 1] = rcosphi * coslambda
    j[1, 2] = cosphi * sinlambda
    j[2, 0] = rcosphi
    j[2, 1] = 0.
    j[2, 2] = sinphi

    # Propagate: J * cov * J^T, then pack the symmetric upper triangle
    # (00, 01, 02, 11, 12, 22) into a 2x3 array.
    mat = j.dot(cov)
    mat = mat.dot(j.T)
    rectcov = np.zeros((2, 3))
    rectcov[0, 0] = mat[0, 0]
    rectcov[0, 1] = mat[0, 1]
    rectcov[0, 2] = mat[0, 2]
    rectcov[1, 0] = mat[1, 1]
    rectcov[1, 1] = mat[1, 2]
    rectcov[1, 2] = mat[2, 2]
    return rectcov


def compute_cov_matrix(record, semimajor_axis):
    """Flatten a record's 2x3 apriori covariance matrix into a 6-item list."""
    cov_matrix = compute_sigma_covariance_matrix(record['lat_Y_North'], record['long_X_East'], record['ht'],
                                                 record['sig0'], record['sig1'], record['sig2'], semimajor_axis)
    return cov_matrix.ravel().tolist()


def apply_transformations(atf_dict, df):
    """Apply in place all Socet -> ISIS column transformations to df."""
    prj_file = os.path.join(atf_dict['PATH'], atf_dict['PROJECT'])
    eRadius, pRadius = get_axis(prj_file)

    # Vectorized body-fixed conversion of all points at once.
    lla = np.array([[df['long_X_East']], [df['lat_Y_North']], [df['ht']]])
    ecef = body_fix(lla, semi_major=eRadius, semi_minor=pRadius, inverse=False)

    df['s.'], df['l.'], df['image_index'] = (zip(*df.apply(line_sample_size, path=atf_dict['PATH'], axis=1)))
    df['known'] = df.apply(known, axis=1)
    df['long_X_East'] = ecef[0][0]
    df['lat_Y_North'] = ecef[1][0]
    df['ht'] = ecef[2][0]
    df['aprioriCovar'] = df.apply(compute_cov_matrix, semimajor_axis=eRadius, axis=1)
    df['ignore'] = df.apply(ignore_toggle, axis=1)


def socet2isis(prj_file):
    """Read a Socet project and return a dataframe ready for an ISIS cnet.

    NOTE(review): despite the parameter name, this argument is the path to
    the .atf project file — confirm with callers before renaming.
    """
    # Read in and set up the atf dict of information.
    atf_dict = read_atf(prj_file)

    # Resolve the gpf and ipf files relative to the project path.
    gpf_file = os.path.join(atf_dict['PATH'], atf_dict['GP_FILE'])
    ipf_list = [os.path.join(atf_dict['PATH'], i) for i in atf_dict['IMAGE_IPF']]

    # Read the gpf file and ipf file(s) into separate dataframes.
    gpf_df = read_gpf(gpf_file)
    ipf_df = read_ipf(ipf_list)

    # Warn about ipf points that have no matching gpf ground point.
    gpf_pt_idx = pd.Index(pd.unique(gpf_df['point_id']))
    ipf_pt_idx = pd.Index(pd.unique(ipf_df['pt_id']))
    point_diff = ipf_pt_idx.difference(gpf_pt_idx)
    if len(point_diff) != 0:
        warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. \
        \n\nContinuing, but these points will be missing from the control network".format(list(point_diff)))

    # Merge the two dataframes on their point id columns.
    socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id')

    # Apply the transformations.
    apply_transformations(atf_dict, socet_df)

    # Rename Socet columns to the names io_controlnetwork expects.
    column_remap = {'l.': 'y', 's.': 'x',
                    'res_l': 'lineResidual', 'res_s': 'sampleResidual',
                    'known': 'Type',
                    'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX',
                    'ht': 'AprioriZ',
                    'sig0': 'AprioriLatitudeSigma',
                    'sig1': 'AprioriLongitudeSigma',
                    'sig2': 'AprioriRadiusSigma',
                    'sig_l': 'linesigma', 'sig_s': 'samplesigma'}
    socet_df.rename(columns=column_remap, inplace=True)

    # Return the socet dataframe to be converted to a control net.
    return socet_df


def serial_numbers(images, path, extension):
    """Build a {image name: ISIS serial number} dict for the given images."""
    serial_dict = dict()
    for image in images:
        snum = sn.generate_serial_number(os.path.join(path, image + extension))
        # Shorten the spacecraft name to match the cube serials.
        snum = snum.replace('Mars_Reconnaissance_Orbiter', 'MRO')
        serial_dict[image] = snum
    return serial_dict

# %% Cell 3: driver
# Setup stuff for the cub information, namely the path and extension.
= '/Volumes/Blueman/' atf_file = get_path('CTX_Athabasca_Middle_step0.atf') path = '/path/where/cub/files/are/' # Extension of your cub files extension = '.something.cub' image_dict = {'P01_001540_1889_XI_08N204W' : 'P01_001540_1889_XI_08N204W.lev1.cub', 'P01_001606_1897_XI_09N203W' : 'P01_001606_1897_XI_09N203W.lev1.cub', 'P02_001804_1889_XI_08N204W' : 'P02_001804_1889_XI_08N204W.lev1.cub', 'P03_002226_1895_XI_09N203W' : 'P03_002226_1895_XI_09N203W.lev1.cub', 'P03_002371_1888_XI_08N204W' : 'P03_002371_1888_XI_08N204W.lev1.cub', 'P19_008344_1894_XN_09N203W' : 'P19_008344_1894_XN_09N203W.lev1.cub', 'P20_008845_1894_XN_09N203W' : 'P20_008845_1894_XN_09N203W.lev1.cub'} # Path to atf file atf_file = ('/path/to/atf/file') socet_df = socet2isis(atf_file) images = pd.unique(socet_df['ipf_file']) serial_dict = serial_numbers(image_dict, path) serial_dict = serial_numbers(images, path, extension) # creates the control network cn.to_isis('/Volumes/Blueman/banana.net', socet_df, serial_dict) cn.to_isis('/path/you/want/the/cnet/to/be/in/cn.net', socet_df, serial_dict) ``` %% Cell type:code id: tags: ``` python ``` plio/io/io_controlnetwork.py +9 −9 Original line number Diff line number Diff line from time import gmtime, strftime import pandas as pd import numpy as np import pvl from plio.io import ControlNetFileV0002_pb2 as cnf Loading Loading @@ -267,24 +268,24 @@ class IsisStore(object): # As per protobuf docs for assigning to a repeated field. 
if attr == 'aprioriCovar': arr = g.iloc[0]['aprioriCovar'] point_spec.aprioriCovar.extend(arr.ravel().tolist()) if isinstance(arr, np.ndarray): arr = arr.ravel().tolist() point_spec.aprioriCovar.extend(arr) else: setattr(point_spec, attr, attrtype(g.iloc[0][attr])) point_spec.type = 2 # Hardcoded to free point_spec.type = 2 # Hardcoded to free this is bad # The reference index should always be the image with the lowest index point_spec.referenceIndex = 0 # A single extend call is cheaper than many add calls to pack points measure_iterable = [] for node_id, m in g.iterrows(): measure_spec = point_spec.Measure() # For all of the attributes, set if they are an dict accessible attr of the obj. for attr, attrtype in self.measure_attrs: if attr in g.columns: setattr(measure_spec, attr, attrtype(m[attr])) measure_spec.serialnumber = serials[m.image_index] measure_spec.sample = m.x measure_spec.line = m.y Loading @@ -298,7 +299,6 @@ class IsisStore(object): point_message = point_spec.SerializeToString() point_sizes.append(point_spec.ByteSize()) point_messages.append(point_message) return point_messages, point_sizes def create_buffer_header(self, networkid, targetname, Loading Loading
[Editor's note: the diff-page paste above was duplicated verbatim a second time here (notebooks/Socet2ISIS.ipynb and plio/io/io_controlnetwork.py); the redundant second copy has been removed. The full content appears once, above.]