import argparse
import os

import pandas as pd

from plio.io.io_bae import save_gpf, save_ipf
from plio.spatial.transformations import apply_isis_transformations
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn


def parse_args():
    """Parse command-line arguments for the isis2socet conversion.

    Returns:
        argparse.Namespace with the parsed arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('cnet_file',
                        help='Path to an isis control network.')
    parser.add_argument('e_radius', type=float,
                        help='The semimajor radius of a given target.')
    # NOTE(review): the help text says "semiminor radius", but main() uses
    # this value as a flattening factor (p_radius = e_radius * (1 - value)).
    # Confirm which meaning is intended.
    parser.add_argument('p_radius', type=float,
                        help='The semiminor radius of a given target.')
    parser.add_argument('cub_path',
                        help='Path to the cub files associated with a control network.')
    parser.add_argument('cub_extension',
                        help='Extension for all cubes.')
    parser.add_argument('cub_list',
                        help='Path to a list file of all cubes being used')
    parser.add_argument('out_gpf',
                        help='Path to save location of gpf file and new ipf files.')
    # Bug fix: previously declared with default=False and no action, so the
    # option consumed a value and any string (even "False") was truthy.
    # store_true makes it a genuine boolean flag, matching its help text.
    parser.add_argument('--adjusted', action='store_true',
                        help='Flag for saving apriori values or adjusted values')
    return parser.parse_args()


def main(args):
    """Convert an ISIS control network into Socet Set gpf/ipf files.

    Reads the control network, remaps ISIS column names to their Socet
    equivalents, applies the coordinate transformations, then writes one
    ipf per image plus a single gpf next to ``args.out_gpf``.

    Args:
        args: Namespace produced by parse_args().
    """
    df = cn.from_isis(args.cnet_file)

    e_radius = args.e_radius
    # Derived as if args.p_radius were a flattening factor; see the NOTE in
    # parse_args() about the ambiguous meaning of this argument.
    p_radius = e_radius * (1 - args.p_radius)

    cub_path = args.cub_path
    extension = args.cub_extension

    # One cube base name per line in the list file.
    with open(args.cub_list, 'r') as f:
        cub_list = f.read().splitlines()

    out_gpf = args.out_gpf
    adjusted_flag = args.adjusted

    # Map each cube's ISIS serial number back to its image/ipf base name.
    serial_dict = {sn.generate_serial_number(os.path.join(cub_path, i + extension)): i
                   for i in cub_list}

    # Drop duplicate columns, keeping the first occurrence of each name
    # (replaces a manual index-collection loop).
    df = df.loc[:, ~df.columns.duplicated()]

    # Remap the ISIS columns to Socet column names.
    column_map = {'id': 'pt_id', 'line': 'l.', 'sample': 's.',
                  'lineResidual': 'res_l', 'sampleResidual': 'res_s',
                  'type': 'known', 'aprioriLatitudeSigma': 'sig0',
                  'aprioriLongitudeSigma': 'sig1', 'aprioriRadiusSigma': 'sig2',
                  'linesigma': 'sig_l', 'samplesigma': 'sig_s',
                  'ignore': 'stat'}

    # Position columns come from either the adjusted or the apriori values,
    # depending on the --adjusted flag.
    if adjusted_flag:
        column_map['adjustedY'] = 'lat_Y_North'
        column_map['adjustedX'] = 'long_X_East'
        column_map['adjustedZ'] = 'ht'
    else:
        column_map['aprioriY'] = 'lat_Y_North'
        column_map['aprioriX'] = 'long_X_East'
        column_map['aprioriZ'] = 'ht'

    df.rename(columns=column_map, inplace=True)

    apply_isis_transformations(df, e_radius, p_radius, serial_dict,
                               extension, cub_path)

    # Save the ipf(s) into the same directory as the gpf.
    save_ipf(df, os.path.split(out_gpf)[0])

    # The ground-point fields repeat within each pt_id group, so keep only
    # the first record of every group, preserving original row order.
    points = sorted(int(group.index[0]) for _, group in df.groupby('pt_id'))

    gpf_df = df.iloc[points].copy()
    gpf_df.rename(columns={'pt_id': 'point_id'}, inplace=True)

    # Save the gpf.
    save_gpf(gpf_df, out_gpf)


if __name__ == '__main__':
    main(parse_args())
help='Path to cube files related to ipf files.') parser.add_argument('cub_ipf_map', help='Path to map file for all ipfs and cubes.') parser.add_argument('extension', help='Extension for all cubes being used.') parser.add_argument('target_name', help='Name of the target body used in the control net') parser.add_argument('--outpath', help='Directory for the control network to be output to.', required = False) parser.add_argument('--outpath', help='Directory for the control network to be output to.') return parser.parse_args() Loading @@ -38,9 +34,9 @@ def main(args): else: outpath = os.path.split(at_file)[0] with open(args.cub_ipf_map) as cub_ipf_map: reader = csv.reader(cub_ipf_map, delimiter = ',') image_dict = dict([(row[0], row[1]) for row in reader]) # with open(args.cub_ipf_map) as cub_ipf_map: # reader = csv.reader(cub_ipf_map, delimiter = ',') # image_dict = dict([(row[0], row[1]) for row in reader]) # Read in and setup the atf dict of information atf_dict = read_atf(at_file) Loading @@ -61,29 +57,27 @@ def main(args): point_diff = ipf_pt_idx.difference(gpf_pt_idx) if len(point_diff) != 0: warnings.warn("The following points found in ipf files missing from gpf file: " + "\n\n{}\n\n".format("\n".join(point_diff)) + "Continuing, but these points will be missing from the control " + "network.", stacklevel=3) warnings.warn("The following points found in ipf files missing from gpf file: \n\n{}. 
\ \n\nContinuing, but these points will be missing from the control network".format(list(point_diff))) # Merge the two dataframes on their point id columns socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id') # Apply the transformations apply_transformations(atf_dict, socet_df) apply_socet_transformations(atf_dict, socet_df) # Define column remap for socet dataframe column_remap = {'l.': 'y', 's.': 'x', 'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type', 'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ', 'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'} column_map = {'pt_id': 'id', 'l.': 'y', 's.': 'x', 'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type', 'lat_Y_North': 'aprioriY', 'long_X_East': 'aprioriX', 'ht': 'aprioriZ', 'sig0': 'aprioriLatitudeSigma', 'sig1': 'aprioriLongitudeSigma', 'sig2': 'aprioriRadiusSigma', 'sig_l': 'linesigma', 'sig_s': 'samplesigma'} # Rename the columns using the column remap above socet_df.rename(columns = column_remap, inplace=True) images = pd.unique(socet_df['ipf_file']) socet_df.rename(columns = column_map, inplace=True) # Build an image and serial dict assuming the cubes will be named as the IPFs are image_dict = {i: i + args.extension for i in pd.unique(socet_df['ipf_file'])} serial_dict = serial_numbers(image_dict, cub_path) # creates the control network Loading Loading
import argparse
import os

import pandas as pd

from plio.io.io_bae import save_gpf, save_ipf
from plio.spatial.transformations import apply_isis_transformations
import plio.io.io_controlnetwork as cn
import plio.io.isis_serial_number as sn


def parse_args():
    """Parse command-line arguments for the isis2socet conversion.

    Returns:
        argparse.Namespace with the parsed arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('cnet_file',
                        help='Path to an isis control network.')
    parser.add_argument('e_radius', type=float,
                        help='The semimajor radius of a given target.')
    # NOTE(review): the help text says "semiminor radius", but main() uses
    # this value as a flattening factor (p_radius = e_radius * (1 - value)).
    # Confirm which meaning is intended.
    parser.add_argument('p_radius', type=float,
                        help='The semiminor radius of a given target.')
    parser.add_argument('cub_path',
                        help='Path to the cub files associated with a control network.')
    parser.add_argument('cub_extension',
                        help='Extension for all cubes.')
    parser.add_argument('cub_list',
                        help='Path to a list file of all cubes being used')
    parser.add_argument('out_gpf',
                        help='Path to save location of gpf file and new ipf files.')
    # Bug fix: previously declared with default=False and no action, so the
    # option consumed a value and any string (even "False") was truthy.
    # store_true makes it a genuine boolean flag, matching its help text.
    parser.add_argument('--adjusted', action='store_true',
                        help='Flag for saving apriori values or adjusted values')
    return parser.parse_args()


def main(args):
    """Convert an ISIS control network into Socet Set gpf/ipf files.

    Reads the control network, remaps ISIS column names to their Socet
    equivalents, applies the coordinate transformations, then writes one
    ipf per image plus a single gpf next to ``args.out_gpf``.

    Args:
        args: Namespace produced by parse_args().
    """
    df = cn.from_isis(args.cnet_file)

    e_radius = args.e_radius
    # Derived as if args.p_radius were a flattening factor; see the NOTE in
    # parse_args() about the ambiguous meaning of this argument.
    p_radius = e_radius * (1 - args.p_radius)

    cub_path = args.cub_path
    extension = args.cub_extension

    # One cube base name per line in the list file.
    with open(args.cub_list, 'r') as f:
        cub_list = f.read().splitlines()

    out_gpf = args.out_gpf
    adjusted_flag = args.adjusted

    # Map each cube's ISIS serial number back to its image/ipf base name.
    serial_dict = {sn.generate_serial_number(os.path.join(cub_path, i + extension)): i
                   for i in cub_list}

    # Drop duplicate columns, keeping the first occurrence of each name
    # (replaces a manual index-collection loop).
    df = df.loc[:, ~df.columns.duplicated()]

    # Remap the ISIS columns to Socet column names.
    column_map = {'id': 'pt_id', 'line': 'l.', 'sample': 's.',
                  'lineResidual': 'res_l', 'sampleResidual': 'res_s',
                  'type': 'known', 'aprioriLatitudeSigma': 'sig0',
                  'aprioriLongitudeSigma': 'sig1', 'aprioriRadiusSigma': 'sig2',
                  'linesigma': 'sig_l', 'samplesigma': 'sig_s',
                  'ignore': 'stat'}

    # Position columns come from either the adjusted or the apriori values,
    # depending on the --adjusted flag.
    if adjusted_flag:
        column_map['adjustedY'] = 'lat_Y_North'
        column_map['adjustedX'] = 'long_X_East'
        column_map['adjustedZ'] = 'ht'
    else:
        column_map['aprioriY'] = 'lat_Y_North'
        column_map['aprioriX'] = 'long_X_East'
        column_map['aprioriZ'] = 'ht'

    df.rename(columns=column_map, inplace=True)

    apply_isis_transformations(df, e_radius, p_radius, serial_dict,
                               extension, cub_path)

    # Save the ipf(s) into the same directory as the gpf.
    save_ipf(df, os.path.split(out_gpf)[0])

    # The ground-point fields repeat within each pt_id group, so keep only
    # the first record of every group, preserving original row order.
    points = sorted(int(group.index[0]) for _, group in df.groupby('pt_id'))

    gpf_df = df.iloc[points].copy()
    gpf_df.rename(columns={'pt_id': 'point_id'}, inplace=True)

    # Save the gpf.
    save_gpf(gpf_df, out_gpf)


if __name__ == '__main__':
    main(parse_args())
bin/socet2isis +19 −25 Original line number Diff line number Diff line #!/usr/bin/env python import argparse import os import sys import argparse import warnings import csv import numpy as np from plio.examples import get_path from plio.io.io_bae import read_atf, read_gpf, read_ipf from plio.spatial.transformations import * from plio.spatial.transformations import apply_socet_transformations, serial_numbers import plio.io.io_controlnetwork as cn import pandas as pd Loading @@ -19,10 +16,9 @@ def parse_args(): # Add args here parser.add_argument('at_file', help='Path to the .atf file for a project.') parser.add_argument('cub_file_path', help='Path to cube files related to ipf files.') parser.add_argument('cub_ipf_map', help='Path to map file for all ipfs and cubes.') parser.add_argument('extension', help='Extension for all cubes being used.') parser.add_argument('target_name', help='Name of the target body used in the control net') parser.add_argument('--outpath', help='Directory for the control network to be output to.', required = False) parser.add_argument('--outpath', help='Directory for the control network to be output to.') return parser.parse_args() Loading @@ -38,9 +34,9 @@ def main(args): else: outpath = os.path.split(at_file)[0] with open(args.cub_ipf_map) as cub_ipf_map: reader = csv.reader(cub_ipf_map, delimiter = ',') image_dict = dict([(row[0], row[1]) for row in reader]) # with open(args.cub_ipf_map) as cub_ipf_map: # reader = csv.reader(cub_ipf_map, delimiter = ',') # image_dict = dict([(row[0], row[1]) for row in reader]) # Read in and setup the atf dict of information atf_dict = read_atf(at_file) Loading @@ -61,29 +57,27 @@ def main(args): point_diff = ipf_pt_idx.difference(gpf_pt_idx) if len(point_diff) != 0: warnings.warn("The following points found in ipf files missing from gpf file: " + "\n\n{}\n\n".format("\n".join(point_diff)) + "Continuing, but these points will be missing from the control " + "network.", stacklevel=3) warnings.warn("The 
following points found in ipf files missing from gpf file: \n\n{}. \ \n\nContinuing, but these points will be missing from the control network".format(list(point_diff))) # Merge the two dataframes on their point id columns socet_df = ipf_df.merge(gpf_df, left_on='pt_id', right_on='point_id') # Apply the transformations apply_transformations(atf_dict, socet_df) apply_socet_transformations(atf_dict, socet_df) # Define column remap for socet dataframe column_remap = {'l.': 'y', 's.': 'x', 'res_l': 'LineResidual', 'res_s': 'SampleResidual', 'known': 'Type', 'lat_Y_North': 'AprioriY', 'long_X_East': 'AprioriX', 'ht': 'AprioriZ', 'sig0': 'AprioriLatitudeSigma', 'sig1': 'AprioriLongitudeSigma', 'sig2': 'AprioriRadiusSigma'} column_map = {'pt_id': 'id', 'l.': 'y', 's.': 'x', 'res_l': 'lineResidual', 'res_s': 'sampleResidual', 'known': 'Type', 'lat_Y_North': 'aprioriY', 'long_X_East': 'aprioriX', 'ht': 'aprioriZ', 'sig0': 'aprioriLatitudeSigma', 'sig1': 'aprioriLongitudeSigma', 'sig2': 'aprioriRadiusSigma', 'sig_l': 'linesigma', 'sig_s': 'samplesigma'} # Rename the columns using the column remap above socet_df.rename(columns = column_remap, inplace=True) images = pd.unique(socet_df['ipf_file']) socet_df.rename(columns = column_map, inplace=True) # Build an image and serial dict assuming the cubes will be named as the IPFs are image_dict = {i: i + args.extension for i in pd.unique(socet_df['ipf_file'])} serial_dict = serial_numbers(image_dict, cub_path) # creates the control network Loading