Unverified Commit 3781ed64 authored by Amy Stamile's avatar Amy Stamile Committed by GitHub
Browse files

Crism Driver 2.0 (#477)



* Initial crism driver.

* Updates instrument id and sensor name lookups.

* Fixes to_isd errors.

* Modified KernelSlice and crism_isd.

* Rename FRT00003B73_01_IF156S_TRR2_isis.lbl to FRT00003B73_01_IF156S_TRR2_isis3.lbl

* Fixed rebase merge conflicts

* Actually fixed rebase

* Fixed rebase.

* Completed crism tests

* Fixed crism ISD

* Forced exact ck times for crism to false

* Fixed Crism test again after rebase

Co-authored-by: default avatarAmy Stamile <astamile@contractor.usgs.gov>
Co-authored-by: default avatarJesse Mapel <jmapel@usgs.gov>
Co-authored-by: default avataracpaquette <acp263@nau.edu>
parent 37077467
Loading
Loading
Loading
Loading
+123 −1
Original line number Diff line number Diff line
@@ -7,6 +7,7 @@ from ale.base.label_pds3 import Pds3Label
from ale.base.label_isis import IsisLabel
from ale.base.type_distortion import RadialDistortion, NoDistortion
from ale.base.type_sensor import LineScanner
from ale.base.type_distortion import NoDistortion

from ale import util

@@ -689,6 +690,114 @@ class MroHiRiseIsisLabelNaifSpiceDriver(LineScanner, IsisLabel, NaifSpice, Radia
        """
        return {**super().naif_keywords, **util.query_kernel_pool(f"*{self.ccd_ikid}*")}

    @property
    def sensor_model_version(self):
        """
        Returns the version of the ISIS sensor model to use for this driver.

        Returns
        -------
        : int
          ISIS sensor model version
        """
        return 1


class MroCrismIsisLabelNaifSpiceDriver(LineScanner, IsisLabel, NaifSpice, NoDistortion, Driver):
    """
    Driver for reading Crism ISIS labels.
    """

    @property
    def instrument_id(self):
        """
        Translate the label's instrument name into the id used for Spice
        lookups (e.g. the id Spice expects in bods2c calls).
        Expects instrument_id to be defined in the IsisLabel mixin and to be
        the string 'CRISM'.

        Returns
        -------
        : str
          instrument id

        Raises
        ------
        KeyError
            If the label's instrument name is not 'CRISM'.
        """
        return {
            "CRISM": "MRO_CRISM_VNIR",
        }[super().instrument_id]

    @property
    def ephemeris_start_time(self):
        """
        Compute the starting ephemeris time of the image from the spacecraft
        clock start count. The hard-coded NAIF code -74999 was obtained from
        the ISIS CRISM camera model. Expects spacecraft_clock_start_count to
        be defined as a string containing the start clock count.

        Returns
        -------
        : double
          Starting ephemeris time of the image
        """
        start_count = self.spacecraft_clock_start_count
        return spice.scs2e(-74999, start_count)

    @property
    def ephemeris_stop_time(self):
        """
        Compute the ephemeris stop time of the image from the spacecraft
        clock stop count. The hard-coded NAIF code -74999 was obtained from
        the ISIS CRISM camera model. Expects spacecraft_clock_stop_count to
        be defined as a string containing the stop clock count.

        Returns
        -------
        : double
          Ephemeris stop time of the image
        """
        stop_count = self.spacecraft_clock_stop_count
        return spice.scs2e(-74999, stop_count)

    @property
    def spacecraft_name(self):
        """
        Map the label's platform name onto the spacecraft name used in
        various Spice calls to acquire ephemeris data.
        Expects platform_name to be defined as the string
        'MARS RECONNAISSANCE ORBITER'.

        Returns
        -------
        : str
          spacecraft name

        Raises
        ------
        KeyError
            If the platform name is not 'MARS RECONNAISSANCE ORBITER'.
        """
        return {
            'MARS RECONNAISSANCE ORBITER': 'MRO',
        }[super().platform_name]

    @property
    def sensor_name(self):
        """
        Returns the name of the instrument. For this driver it is simply the
        translated instrument id (e.g. 'MRO_CRISM_VNIR').

        Returns
        -------
        : str
          name of the instrument.
        """
        return self.instrument_id

    @property
    def sensor_frame_id(self):
        """
        Returns the Naif ID code for the sensor reference frame.
        Hard-coded to -74000 and not dependent on filter.
        NOTE(review): the original docstring referred to "the OsirisRex
        instrument" — a copy/paste artifact; this driver is for MRO CRISM.

        Returns
        -------
        : int
          Naif ID code for the sensor frame
        """
        return -74000

    @property
    def sensor_model_version(self):
        """
@@ -700,3 +809,16 @@ class MroHiRiseIsisLabelNaifSpiceDriver(LineScanner, IsisLabel, NaifSpice, Radia
          ISIS sensor model version
        """
        return 1

    @property
    def line_exposure_duration(self):
        """
        Line exposure duration, computed as the total imaging time
        (ephemeris stop time minus ephemeris start time) divided by the
        number of image lines.

        Returns
        -------
        : float
          Returns the line exposure duration in seconds.
        """
        total_time = self.ephemeris_stop_time - self.ephemeris_start_time
        return total_time / self.image_lines
+0 −1
Original line number Diff line number Diff line
@@ -272,7 +272,6 @@ def get_kernels_from_isis_pvl(kernel_group, expand=True, format_as="list"):
                kernels.extend(kernel)
        if expand:
            isisprefs = get_isis_preferences()

            if not "DataDirectory" in isisprefs:
              warnings.warn("No IsisPreferences file found, is your ISISROOT env var set?")

+6 −1
Original line number Diff line number Diff line
%% Cell type:code id: tags:

``` python
import spiceypy as spice
import pvl
import os
import re
import subprocess
from ale import util
from itertools import chain
import io
import networkx as nx


# These should be provided when running this script.
# cube: path to the spiced ISIS cube the sliced kernels are generated for.
cube = "leisa/lsb_0296962438_0x53c_eng/lsb_0296962438_0x53c_eng-spiced.cub"
output_dir = "leisa/lsb_0296962438_0x53c_eng/" # Output dir for created kernel files
data_dir = "isis3/isis_data/" # Dir of where to pull original kernels from

def merge_intervals(intervals):
    """
    Merge a set of intervals. The intervals are assumed to be closed, that is
    they include the end-points.

    Parameters
    ----------
    intervals : list
                The input list of intervals where each interval is a tuple of
                (start, end)

    Returns
    -------
    : list
      A sorted list of the merged intervals. Empty input yields an empty
      list.
    """
    # Guard against an empty input; the loop below seeds the result with the
    # first interval and would otherwise raise an IndexError.
    if not intervals:
        return []
    sorted_intervals = sorted(intervals, key=lambda tup: tup[0])
    merged = [sorted_intervals[0]]
    for interval in sorted_intervals[1:]:
        if interval[0] > merged[-1][1]:
            # No intersection with the last merged interval
            merged.append(interval)
        elif interval[1] > merged[-1][1]:
            # Intersection, but the new interval isn't wholly contained
            merged[-1] = (merged[-1][0], interval[1])
    return merged

def add_light_time_correction(cube_info, padding=120):
    """
    Compute the time intervals for the image and any light time correction.

    Builds the padded imaging interval plus one copy shifted by each of the
    one-way light times obtained from spkez (spacecraft and target relative
    to observer 0, and the sun as seen from the target), then merges any
    overlapping intervals.

    Parameters
    ----------
    cube_info : ordered dict
                The cube info from ale.util.generate_kernels_from_cube
    padding : float
              Time padding in seconds to add to each interval

    Returns
    -------
    : list
      A sorted list of the intervals as (start_et, stop_et)
    """
    start_et = spice.scs2e(cube_info['SpacecraftID'], cube_info['SpacecraftClockCount'])

    # End of imaging: start plus per-line exposure times the number of lines.
    end_et = start_et + cube_info['ExposureDuration'] * cube_info['Lines']

    # Only the light-time outputs are needed; the state vectors are unused.
    _, inst_lt = spice.spkez(cube_info['SpacecraftID'], start_et, 'J2000', 'NONE', 0)
    _, target_lt = spice.spkez(cube_info['TargetID'], start_et, 'J2000', 'NONE', 0)
    _, sun_lt = spice.spkez(10, start_et, 'J2000', 'NONE', cube_info['TargetID'])

    # Offset 0.0 reproduces the unshifted imaging interval.
    shifted = [(start_et - padding - lt, end_et + padding - lt)
               for lt in (0.0, inst_lt, target_lt, sun_lt)]
    return merge_intervals(shifted)
```

%% Cell type:code id: tags:

``` python
# These are the processing steps. This will make use of the cube provided further up to create smaller,
# more manageable kernel files for ale testing purposes. This currently only handles ck and spk files.

# Get dictionary of kernel lists from cube
cube_info = util.generate_kernels_from_cube(cube, format_as = 'dict')

# Replace path variables with absolute paths for kernels
for kernel_list in cube_info:
    for index, kern in enumerate(cube_info[kernel_list]):
        if kern is not None:
            cube_info[kernel_list][index] = data_dir + kern.strip('$')

# Create ordered list of kernels for furnishing
kernels = [kernel for kernel in chain.from_iterable(cube_info.values()) if isinstance(kernel, str)]
spice.furnsh(kernels)

# Loads cube as pvl to extract rest of data
cube_pvl = pvl.load(cube)

# Save other necessary info in cube_info dict
cube_info.update(Lines = cube_pvl['IsisCube']['Core']['Dimensions']['Lines'])
cube_info.update(SpacecraftClockCount = cube_pvl['IsisCube']['Instrument']['SpacecraftClockStartCount'])
cube_info.update(ExposureDuration = cube_pvl['IsisCube']['Instrument']['ExposureDuration'])
cube_info.update(TargetID = spice.bods2c(cube_pvl['IsisCube']['Instrument']['TargetName']))
cube_info.update(SpacecraftID = spice.bods2c(cube_pvl['IsisCube']['Instrument']['SpacecraftName']))

# Account for light time correction
intervals = add_light_time_correction(cube_info)

# For each binary ck kernel specified in cube, run the ckslicer, commnt and toxfr commands
for ck in [k for k in kernels if k.lower().endswith('.bc')]:
    ck_path, ck_file_extension = os.path.splitext(ck)
    ck_basename = os.path.basename(ck_path)
    for index, interval in enumerate(intervals):
        for frame in util.get_ck_frames(ck):
            output_basename = os.path.join(output_dir, ck_basename + '_' + str(index) + '_sliced_' + str(frame))
            output_kern = output_basename + ck_file_extension
            output_comments = output_basename + '.cmt'
            start_sclk = spice.sce2s(cube_info['SpacecraftID'], interval[0])
            end_sclk = spice.sce2s(cube_info['SpacecraftID'], interval[1])
            # Create new sliced ck kernel.
            # Each flag and its value are separate argv elements so subprocess
            # hands the NAIF utility distinct command-line tokens; a single
            # "-FLAG value" string would arrive as one argument. The original
            # command also passed -ID twice — once is sufficient.
            ckslicer_command = ["ckslicer",
                                "-LSK", cube_info['LeapSecond'][0],
                                "-SCLK", cube_info['SpacecraftClock'][0],
                                "-INPUTCK", ck,
                                "-OUTPUTCK", output_kern,
                                "-ID", str(frame),
                                "-TIMETYPE", "SCLK",
                                "-START", start_sclk,
                                "-STOP", end_sclk]
            subprocess.run(ckslicer_command, check=True)

            # Remove old comments from new ck kernel
            commnt_command = ["commnt", "-d", output_kern]
            subprocess.run(commnt_command, check=True)

            with open(output_comments, 'w+') as comment_file:
                comment_file.write("This CK is for testing with the image: {}\n".format(cube))
                # The original string carried an unfilled "{}" placeholder;
                # the command itself is written on the following line.
                comment_file.write("\nThis CK was generated using the following command:\n")
                comment_file.write(" ".join(ckslicer_command))

            # Add new comments to new ck kernel
            new_commnts_command = ["commnt", "-a", output_kern, output_comments]
            subprocess.run(new_commnts_command, check=True)

            # Create the transfer file of the new ck kernel
            subprocess.run(["toxfr", output_kern], check=True)

# Create the config file for the spkmerge command and run it for each interval
for index, interval in enumerate(intervals):
    output_spk_basename = os.path.join(output_dir, os.path.basename(os.path.splitext(cube)[0]) + '_' + str(index))
    output_spk = output_spk_basename + '.bsp'
    start_utc = spice.et2utc(interval[0], 'c', 3)
    end_utc = spice.et2utc(interval[1], 'c', 3)
    spk_dep_tree = util.create_spk_dependency_tree([k for k in kernels if k.lower().endswith('.bsp')])
    config_string = util.spkmerge_config_string(spk_dep_tree,
                                                output_spk,
                                                [cube_info['TargetID'], cube_info['SpacecraftID'], 10],
                                                cube_info['LeapSecond'][0],
                                                start_utc,
                                                end_utc)
    config_file = output_spk_basename + '.conf'
    with open(config_file, 'w+') as spk_config:
        spk_config.write(config_string)

    # Create the new SPK
    subprocess.run(["spkmerge", config_file], check=True)

    # Create the transfer file of the new SPK kernel
    subprocess.run(["toxfr", output_spk], check=True)
```

%% Cell type:code id: tags:

``` python
```

%% Cell type:code id: tags:

``` python
```
+101 −0
Original line number Diff line number Diff line
%% Cell type:markdown id: tags:

# Writing out a USGSCSM ISD from a PDS3 Cassini ISS image

%% Cell type:code id: tags:

``` python
import os
import json

import ale
from ale.drivers.cassini_drivers import CassiniIssPds3LabelNaifSpiceDriver
from ale.formatters.usgscsm_formatter import to_usgscsm
```

%% Cell type:markdown id: tags:

## Instantiating an ALE driver

ALE drivers are objects that define how to acquire common ISD keys from an input image format, in this case we are reading in a PDS3 image using NAIF SPICE kernels for exterior orientation data. If the driver utilizes NAIF SPICE kernels, it is implemented as a [context manager](https://docs.python.org/3/reference/datamodel.html#context-managers) and will furnish metakernels when entering the context (i.e. when entering the `with` block) and free the metakernels on exit. This maintains the integrity of spicelib's internal data structures. These driver objects are short-lived and are input to a formatter function that consumes the API to create a serializable file format. `ale.formatters` contains available formatter functions.

The default config file is located at `ale/config.yml` and is copied into your home directory at `.ale/config.yml` on first use of the library. The config file can be modified using a text editor. `ale.config` is loaded into memory as a dictionary. It is used to find metakernels for different missions. For example, there is an entry for cassini that points to `/usgs/cpkgs/isis3/data/cassini/kernels/mk/` by default. If you want to use your own metakernels, you will need to update this path. For example, if the metakernels are located in `/data/cassini/mk/` the cassini entry should be updated with this path. If you are using the default metakernels, then you do not need to update the path.

ALE has a two step process for writing out an ISD: 1. Instantiate your driver (in this case `CassiniIssPds3LabelNaifSpiceDriver`) within a context and 2. pass the driver object into a formatter (in this case, `to_usgscsm`).

Requirements:
 * A PDS3 Cassini ISS image
 * NAIF metakernels installed
 * Config file path for Cassini (ale.config.cassini) pointing to the Cassini NAIF metakernel directory
 * A conda environment with ALE installed into it using the `conda install` command or created using the environment.yml file at the base of ALE.

%% Cell type:code id: tags:

``` python
# printing config displays the yaml formatted string
print(ale.config)

# config object is a dictionary so it has the same access patterns
print('Cassini spice directory:', ale.config['cassini'])

# updating config for new LRO path in this notebook
# Note: this will not change the path in `.ale/config.yml`. This change only lives in the notebook.
# ale.config['cassini'] = '/data/cassini/mk/'
```

%% Output

    cassini: /usgs/cpkgs/isis3/data/cassini/kernels/mk/
    dawn: /data/spice/dawn-m_a-spice-6-v1.0/dawnsp_1000/extras/mk
    kaguya: /data/spice/SELENE/kernels/mk/
    lro: /scratch/jlaura/spice/lro-l-spice-6-v1.0/lrosp_1000/extras/mk/
    mdis: /data/spice/mess-e_v_h-spice-6-v1.0/messsp_1000/extras/mk
    mro: /data/spice/mro-m-spice-6-v1.0/mrosp_1000/extras/mk
    spice_root: /data/spice/
    
    Cassini spice directory: /usgs/cpkgs/isis3/data/cassini/kernels/mk/

%% Cell type:code id: tags:

``` python
# change to desired PDS3 image path
file_name = '/home/kberry/dev/ale/ale/N1702360370_1.LBL'

# metakernels are furnished when entering the context (with block) with a driver instance
# most driver constructors simply accept an image path
with CassiniIssPds3LabelNaifSpiceDriver(file_name) as driver:
    # pass driver instance into formatter function
    usgscsmString = to_usgscsm(driver)
```

%% Cell type:markdown id: tags:

### Write ISD to disk

ALE formatter functions generally return bytes or a string that can be written out to disk. ALE's USGSCSM formatter function returns a JSON encoded string that can be written out using any JSON library.

USGSCSM requires the ISD to be colocated with the image file with a `.json` extension in place of the image extension.

%% Cell type:code id: tags:

``` python
# Load the json string into a dict
usgscsm_dict = json.loads(usgscsmString)

# Write the dict out to the associated file
json_file = os.path.splitext(file_name)[0] + '.json'

# Save off the json and read it back in to check if
# the json exists and was formatted correctly
with open(json_file, 'w') as fp:
    json.dump(usgscsm_dict, fp)

with open(json_file, 'r') as fp:
    usgscsm_dict = json.load(fp)

usgscsm_dict.keys()
```

%% Output

    dict_keys(['radii', 'sensor_position', 'sun_position', 'sensor_orientation', 'detector_sample_summing', 'detector_line_summing', 'focal_length_model', 'detector_center', 'starting_detector_line', 'starting_detector_sample', 'focal2pixel_lines', 'focal2pixel_samples', 'optical_distortion', 'image_lines', 'image_samples', 'name_platform', 'name_sensor', 'reference_height', 'name_model', 'center_ephemeris_time'])
+90 −0
Original line number Diff line number Diff line
%% Cell type:markdown id: tags:

# Writing out a USGSCSM ISD from a PDS3 Messenger MDIS image

%% Cell type:code id: tags:

``` python
import ale
from ale.drivers.messenger_drivers import MessengerMdisPds3NaifSpiceDriver
from ale.formatters.usgscsm_formatter import to_usgscsm
import json
import os
```

%% Cell type:markdown id: tags:

## Instantiating an ALE driver

ALE drivers are objects that define how to acquire common ISD keys from an input image format, in this case we are reading in a PDS3 image using NAIF SPICE kernels for exterior orientation data. If the driver utilizes NAIF SPICE kernels, it is implemented as a [context manager](https://docs.python.org/3/reference/datamodel.html#context-managers) and will furnish metakernels when entering the context (i.e. when entering the `with` block) and free the metakernels on exit. This maintains the integrity of spicelib's internal data structures. These driver objects are short-lived and are input to a formatter function that consumes the API to create a serializable file format. `ale.formatters` contains available formatter functions.

The default config file is located at `ale/config.yml` and is copied into your home directory at `.ale/config.yml` on first use of the library. The config file can be modified using a text editor. `ale.config` is loaded into memory as a dictionary. It is used to find metakernels for different missions. For example, there is an entry for MDIS that points to `/scratch/jlaura/spice/mess-e_v_h-spice-6-v1.0/messsp_1000/extras/mk` by default. If you want to use your own metakernels, you will need to update this path. For example, if the metakernels are located in `/data/mdis/mk/` the MDIS entry should be updated with this path. If you are using the default metakernels, then you do not need to update the path.

ALE has a two step process for writing out an ISD: 1. Instantiate your driver (in this case `MessengerMdisPds3NaifSpiceDriver`) within a context and 2. pass the driver object into a formatter (in this case, `to_usgscsm`).

Requirements:
 * A PDS3 Messenger MDIS image
 * NAIF metakernels installed
 * Config file path for MDIS (ale.config.mdis) pointing to MDIS NAIF metakernel directory
 * A conda environment with ALE installed into it using the `conda install` command or created using the environment.yml file at the base of ALE.

%% Cell type:code id: tags:

``` python
# printing config displays the yaml formatted string
print(ale.config)

# config object is a dictionary so it has the same access patterns
print('MDIS spice directory:', ale.config['mdis'])

# updating config for new MDIS path in this notebook
# Note: this will not change the path in `.ale/config.yml`. This change only lives in the notebook.
# ale.config['mdis'] = '/data/mdis/mk/'
```

%% Output

    cassini: /usgs/cpkgs/isis3/data/cassini/kernels/mk/
    dawn: /data/spice/dawn-m_a-spice-6-v1.0/dawnsp_1000/extras/mk
    kaguya: /data/spice/SELENE/kernels/mk/
    lro: /scratch/jlaura/spice/lro-l-spice-6-v1.0/lrosp_1000/extras/mk/
    mdis: /scratch/jlaura/spice/mess-e_v_h-spice-6-v1.0/messsp_1000/extras/mk
    mro: /data/spice/mro-m-spice-6-v1.0/mrosp_1000/extras/mk
    spice_root: /data/spice/
    
    MDIS spice directory: /scratch/jlaura/spice/mess-e_v_h-spice-6-v1.0/messsp_1000/extras/mk

%% Cell type:code id: tags:

``` python
# change to desired PDS3 image path
fileName = 'EN1072174528M.IMG'

# metakernels are furnished when entering the context (with block) with a driver instance
# most driver constructors simply accept an image path
with MessengerMdisPds3NaifSpiceDriver(fileName) as driver:
    # pass driver instance into formatter function
    usgscsmString = to_usgscsm(driver)
```

%% Cell type:markdown id: tags:

### Write ISD to disk

ALE formatter functions generally return bytes or a string that can be written out to disk. ALE's USGSCSM formatter function returns a JSON encoded string that can be written out using any JSON library.

USGSCSM requires the ISD to be colocated with the image file with a `.json` extension in place of the image extension.

%% Cell type:code id: tags:

``` python
# load the json encoded string ISD
usgscsm_dict = json.loads(usgscsmString)

# strip the image file extension and append .json
jsonFile = os.path.splitext(fileName)[0] + '.json'

# write to disk
with open(jsonFile, 'w') as fp:
    json.dump(usgscsm_dict, fp)
```
Loading