Example No. 1
def fi_yaml(image, **options):
    # 'title' is assumed to be passed in via options (compare create_yml below)
    title = options.get('title', '')
    cube = parse_file_label(image)
    label = cube['IsisCube']
    orbit = label['Archive']['OrbitNumber']
    scale = label['Mapping']['PixelResolution']
    time  = label['Instrument']['StartTime'].replace('T',' ')

    with TempImage(image, 'campt') as campt:
        isis.campt(from_=image, to=campt)
        label = parse_file_label(campt)

    points = label['GroundPoint']
    clon  = points['PositiveEast360Longitude']
    clat  = points['PlanetocentricLatitude']

    data = {
        ':release':    'YYYY-MM-DD 10:00:00.00 +00:00',
        ':title':      title,
        ':timestamp':  '%s +00:00' % time,
        ':orbit':      orbit,
        ':clat':       '%.3f°' % clat,
        ':clon':       '%.3f°' % clon,
        ':resolution': '%.2f m/pixel' % scale['value'],
        ':mode':       'Native',
        ':ptif':       str(image.tif),
        ':thumb':      str(image.png)
    }

    with open(rename(image, '.yml', '.proj.cub'), 'w') as fp:
        yaml.dump(data, fp, default_flow_style=False)
Example No. 2
def create_yml(image, title):
    """ This function generates a yml file with information.
    Args:
        image:
        title:
    """
    cube = parse_file_label(image.proj.cub)
    label = cube['IsisCube']
    print(label)
    orbit = label['Archive']['OrbitNumber']
    scale = label['Mapping']['PixelResolution']
    time  = label['Instrument']['StartTime'].replace('T',' ')

    isis.campt(from_=image, to=image.campt)
    label = parse_file_label(image.campt)

    points = label['GroundPoint']
    clon  = points['PositiveEast360Longitude']
    clat  = points['PlanetocentricLatitude']

    data = {
        ':release':    'YYYY-MM-DD 10:00:00.00 +00:00',
        ':title':      title,
        ':timestamp':  '%s +00:00' % time,
        ':orbit':      orbit,
        ':clat':       '%.3f°' % clat,
        ':clon':       '%.3f°' % clon,
        ':resolution': '%.2f m/pixel' % scale['value'],
        ':mode':       'Native',
        ':ptif':       str(image.tif),
        ':thumb':      str(image.png)
    }

    with open(image.yml, 'w') as yaml_file:
        yaml.dump(data, yaml_file, default_flow_style=False)
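A minimal standalone sketch of the pattern both examples above share (run campt, parse the PVL output, read the center ground point), assuming pysis and pvl are installed; all file names are hypothetical.

# Sketch only: mirrors the GroundPoint keys used in the examples above.
import pvl
import yaml
from pysis import isis

isis.campt(from_='example.proj.cub', to='example.campt')
ground = pvl.load('example.campt')['GroundPoint']

# pvl may return plain numbers or unit-tagged quantities, so unwrap defensively.
clat = ground['PlanetocentricLatitude']
clon = ground['PositiveEast360Longitude']
clat = getattr(clat, 'value', clat)
clon = getattr(clon, 'value', clon)

data = {':clat': '%.3f°' % clat, ':clon': '%.3f°' % clon}
with open('example.yml', 'w') as fp:
    yaml.dump(data, fp, default_flow_style=False)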
Example No. 3
def do_campt(mosaicname, savepath, temppath):
    print("Calling do_campt")
    try:
        campt(from_=mosaicname, to=savepath, format='flat', append='no',
              coordlist=temppath, coordtype='image')
    except ProcessError as e:
        print(e.stderr)
        return mosaicname, False
    # assumed success path, mirroring the failure return above
    return mosaicname, True
Example No. 4
def do_campt(mosaicname, savepath, temppath):
    print("Calling do_campt")
    try:
        campt(from_=mosaicname,
              to=savepath,
              format='flat',
              append='no',
              coordlist=temppath,
              coordtype='image')
    except ProcessError as e:
        print(e.stderr)
        return mosaicname, False
    # assumed success path, mirroring the failure return above
    return mosaicname, True
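A hedged usage sketch for the wrapper above: with coordtype='image' the coordinate list is a plain text file of "sample, line" pairs. The paths and coordinates below are hypothetical.

# Sketch only: write an image-space coordinate list, then run the wrapper.
with open('coords.lis', 'w') as f:
    f.write('100, 200\n')
    f.write('300, 400\n')

result = do_campt('mosaic.cub', 'campt_out.pvl', 'coords.lis')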
Example No. 5
def get_campt_label(frompath, sample, line):
    try:
        group = pvl.load(campt(from_=str(frompath), sample=sample,
                               line=line)).get('GroundPoint')
    except ProcessError as e:
        print(e.stdout)
        print(e.stderr)
        raise e
    else:
        return group
Example No. 6
def get_campt_label(frompath, sample, line):
    try:
        group = pvl.load(campt(from_=str(frompath),
                               sample=sample,
                               line=line)).get('GroundPoint')
    except ProcessError as e:
        print(e.stdout)
        print(e.stderr)
        raise e
    else:
        return group
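A short usage sketch with a hypothetical cube path; the returned GroundPoint group is the same PVL group the other examples read latitude and longitude from.

# Sketch only: hypothetical path and pixel coordinates.
group = get_campt_label('observation.cub', sample=512, line=512)
print(group['PlanetocentricLatitude'], group['PositiveEast360Longitude'])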
Example No. 7
def crop_latlon(*,
                center_lat: float,
                center_lon: float,
                nsamples,
                nlines,
                to_cube,
                from_cube,
                pad=None,
                failed_list_to=None):
    print('cropping {}'.format(from_cube))
    try:
        center_campt = pvl.loads(
            campt(from_=from_cube,
                  type="ground",
                  latitude=center_lat,
                  longitude=center_lon))
        catlab_data = pvl.loads(catlab(from_=from_cube))

    except ProcessError as e:
        print(e.stderr, e.stdout)
        # campt/catlab output is required below, so give up on this cube
        return

    center_pixel = (center_campt['GroundPoint']['Line'],
                    center_campt['GroundPoint']['Sample'])

    #If user asked for max width, start at left edge
    if nsamples == 'max':
        nw_pixel = [center_pixel[0] - int(nlines) / 2, 1]
        nsamples = int(
            catlab_data['IsisCube']['Core']['Dimensions']['Samples'])
    else:
        nw_pixel = [
            center_pixel[0] - int(nlines) / 2,
            center_pixel[1] - int(nsamples) / 2
        ]

    try:
        print("sample:{} line:{}".format(nw_pixel[1], nw_pixel[0]))
        crop(from_=from_cube,
             sample=int(nw_pixel[1]),
             line=int(nw_pixel[0]),
             nsamples=nsamples,
             nlines=nlines,
             to=to_cube)
    except ProcessError as e:
        print(e.stderr, e.stdout)
        if failed_list_to:
            with open(failed_list_to, 'a') as failed_list:
                failed_list.write(' {}, {} \n'.format(from_cube, e.stderr))
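A hedged call sketch for crop_latlon; all arguments are keyword-only and the values below are hypothetical.

# Sketch only: crop a 1000 x 1000 pixel window centered on a ground point.
crop_latlon(center_lat=-14.6,
            center_lon=175.5,
            nsamples=1000,
            nlines=1000,
            from_cube='input.l2.cub',
            to_cube='cropped.l2.cub',
            failed_list_to='failed_crops.txt')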
Example No. 8
def campt_header(outcube):
    """
    Compute the incidence angle at the center of the image and the local
    solar time.  These are required by the Davinci processing pipeline to
    determine what processing to perform.
    """

    workingpath, fname = os.path.split(outcube)
    fname = os.path.splitext(fname)[0]

    header = pvl.load(outcube)
    samples = find_in_dict(header, 'Samples')
    lines = find_in_dict(header, 'Lines')

    coordinatelist = os.path.join(workingpath, 'coordinatelist.lis')
    with open(coordinatelist, 'w') as f:
        f.write('{},{}\n'.format(samples / 2, lines / 2))
        f.write('1,1\n')  #UpperLeft
        f.write('{},{}\n'.format(samples - 1, lines - 1))  #LowerRight
    campt = pvl.loads(
        isis.campt(from_=outcube,
                   to=os.path.join(workingpath, fname + '_campt.pvl'),
                   usecoordlist='yes',
                   coordlist=coordinatelist,
                   coordtype='image'))
    for j, g in enumerate(campt.items()):
        if j == 0:
            #Incidence at the center of the image
            try:
                incidence = g[1]['Incidence'].value
            except AttributeError:  # Incidence reported without attached units
                incidence = g[1]['Incidence']
        elif j == 1:
            #Upper Left Corner Pixel
            stoplocaltime = g[1]['LocalSolarTime'].value
        elif j == 2:
            #Lower Right Corner Pixel
            startlocaltime = g[1]['LocalSolarTime'].value
    return incidence, stoplocaltime, startlocaltime
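A usage sketch, assuming a hypothetical camera-space ISIS cube produced earlier in the pipeline.

# Sketch only: hypothetical cube path.
incidence, stop_lt, start_lt = campt_header('/work/observation.cub')
print('incidence: {}, local solar time: {} to {}'.format(incidence, start_lt, stop_lt))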
Example No. 9
def themis_pairs(root, id1, id2):
    def stats(arr):
        return {
            'mean': float(np.mean(arr)),
            'min': float(np.min(arr)),
            'max': float(np.max(arr)),
            'stddev': float(np.std(arr))
        }

    # enforce ID1 < ID2
    id1, id2 = sorted([id1, id2])

    data_dir = config.data
    themis_dir1 = os.path.join(data_dir, "THEMIS", id1[0], id1[1], id1)
    themis_dir2 = os.path.join(data_dir, "THEMIS", id2[0], id2[1], id2)
    pair_dir = os.path.join(data_dir, "THEMIS_PAIRS", id1, id2)

    map_file = config.themis.map_file
    if not os.path.isfile(map_file):
        raise Exception("{} does not exist.".format(map_file))


    pair_original_path = os.path.join(pair_dir, 'original')
    pair_images_path = os.path.join(pair_dir, 'imagedata')
    bundle_result_path = os.path.join(pair_dir, 'bundle')
    plot_path = os.path.join(pair_dir, 'plots')

    img1_path = os.path.join(themis_dir1, 'original', 'l1.cub')
    img2_path = os.path.join(themis_dir2, 'original', 'l1.cub')

    img1_cropped_path = os.path.join(pair_original_path, 'source.l1.cub')
    img2_cropped_path = os.path.join(pair_original_path, 'destination.l1.cub')

    img1_projected_path = os.path.join(pair_original_path, 'source.l2.cub')
    img2_projected_path = os.path.join(pair_original_path, 'destination.l2.cub')

    img1_projected_bt_path = os.path.join(pair_original_path, 'source.l2.bt.cub')
    img2_projected_bt_path = os.path.join(pair_original_path, 'destination.l2.bt.cub')

    img2_matchmapped_path =  os.path.join(pair_original_path, 'destination.l2.mm.cub')
    img2_matchmapped_bt_path = os.path.join(pair_original_path, 'destination.l2.bt.mm.cub')

    cubelis = os.path.join(pair_dir, 'filelist.txt')

    cnet_path = os.path.join(bundle_result_path, 'cnet.net')

    autocnet_plot_path = os.path.join(plot_path, 'autocnet.tif')
    histogram_plot_path = os.path.join(plot_path, 'hist.tif')
    overlap_plot_path = os.path.join(plot_path, 'overlap.tif')

    img1_b9_path = os.path.join(pair_images_path, 'source.b9.tif')
    img2_b9_path = os.path.join(pair_images_path, 'destination.b9.tif')
    img1_b9_bt_path = os.path.join(pair_images_path, 'source.b9.bt.tif')
    img2_b9_bt_path = os.path.join(pair_images_path, 'destination.b9.bt.tif')

    rad_diff_image = os.path.join(pair_images_path, 'rad_diff.tif')
    bt_diff_image = os.path.join(pair_images_path, 'bt_diff.tif')

    logger.info('Making directories {} and {}'.format(pair_original_path, pair_images_path))
    os.makedirs(pair_original_path, exist_ok=True)
    os.makedirs(pair_images_path, exist_ok=True)
    os.makedirs(bundle_result_path, exist_ok=True)
    os.makedirs(plot_path, exist_ok=True)

    # write out cubelist
    with open(cubelis, 'w') as f:
        f.write(img1_cropped_path + '\n')
        f.write(img2_cropped_path + '\n')

    logger.info('IDs: {} {}'.format(id1, id2))
    logger.info('DATA DIR: {}'.format(data_dir))
    logger.info('IMAGE 1 PATH: {}'.format(img1_path))
    logger.info('IMAGE 2 PATH: {}'.format(img2_path))
    logger.info('PAIR OG DIR: {}'.format(pair_original_path))
    logger.info('PAIR IMAGE PATH: {}'.format(pair_images_path))
    logger.info('PAIR DIR: {}'.format(pair_dir))

    img1_smithed = False
    img2_smithed = False

    img1_smithed = utils.preprocess(id1, themis_dir1, day=True, validate=True, gtiffs=False, projected_images=False)
    img2_smithed = utils.preprocess(id2, themis_dir2, day=True, validate=True, gtiffs=False, projected_images=False)

    img1_fh = GeoDataset(img1_path)
    img2_fh = GeoDataset(img2_path)

    # minLat maxLat minLon maxLon
    minLat, maxLat, _, _ = img1_fh.footprint.Intersection(img2_fh.footprint).GetEnvelope()
    utils.thm_crop(img1_path, img1_cropped_path, minLat, maxLat)
    utils.thm_crop(img2_path, img2_cropped_path, minLat, maxLat)

    del (img1_fh, img2_fh)

    used_smithed = True
    if not (img1_smithed and img2_smithed):
        logger.info("No smithed kernels found, matching with Autocnet.")
        used_smithed = False
        cg = utils.match_pair(img1_cropped_path, img2_cropped_path, figpath=autocnet_plot_path)
        cg.generate_control_network()
        cg.to_isis(os.path.splitext(cnet_path)[0])

        bundle_parameters = config.themis.bundle_parameters
        bundle_parameters['from_'] = cubelis
        bundle_parameters['cnet'] = cnet_path
        bundle_parameters['onet'] = cnet_path
        bundle_parameters['file_prefix'] = bundle_result_path+'/'
        logger.info("Running Jigsaw, parameters:\n")
        utils.print_dict(bundle_parameters)
        try:
            jigsaw(**bundle_parameters)
        except ProcessError as e:
            logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
            logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))
            raise Exception("Jigsaw Error")

    try:
        map_pvl = pvl.load(map_file)
    except Exception as e:
        logger.error("Error loading mapfile {}:\n{}".format(map_file, e))

    logger.info('Projecting {} to {} with map file:\n {}'.format(img1_cropped_path, img1_projected_path, map_pvl))
    utils.project(img1_cropped_path, img1_projected_path, map_file)

    logger.info('Projecting {} to {} with map file:\n {}'.format(img2_cropped_path, img2_projected_path, map_pvl))
    utils.project(img2_cropped_path, img2_projected_path, map_file)

    img1_footprint = GeoDataset(img1_projected_path).footprint
    img2_footprint = GeoDataset(img2_projected_path).footprint
    overlap_geom = img2_footprint.Intersection(img1_footprint)

    try:
        out1, err1 = utils.run_davinci('thm_tb.dv', img1_projected_path, img1_projected_bt_path)
        out2, err2 = utils.run_davinci('thm_tb.dv', img2_projected_path, img2_projected_bt_path)
    except Exception as e:
        logger.error(e)

    try:
        out1, err1 = utils.run_davinci('thm_post_process.dv', img1_projected_bt_path, img1_projected_bt_path)
        out2, err2 = utils.run_davinci('thm_post_process.dv', img2_projected_bt_path, img2_projected_bt_path)

        out1, err1 = utils.run_davinci('thm_bandselect.dv', img1_projected_bt_path, img1_projected_bt_path, args=['band=9'])
        out2, err2 = utils.run_davinci('thm_bandselect.dv', img2_projected_bt_path, img2_projected_bt_path, args=['band=9'])
    except Exception as e:
        logger.error(e)

    try:
        out1, err1 = utils.run_davinci('thm_post_process.dv', img1_projected_path, img1_projected_path)
        out2, err2 = utils.run_davinci('thm_post_process.dv', img2_projected_path, img2_projected_path)

        out1, err1 = utils.run_davinci('thm_bandselect.dv', img1_projected_path, img1_projected_path, args=['band=9'])
        out2, err2 = utils.run_davinci('thm_bandselect.dv', img2_projected_path, img2_projected_path, args=['band=9'])
    except Exception as e:
        logger.error(e)


    footprintinit(from_=img2_projected_bt_path)
    footprintinit(from_=img2_projected_path)

    logger.info('Creating matchmapped cubes')
    utils.project(img2_projected_path, img2_matchmapped_path, img1_projected_path, matchmap=True)
    utils.project(img2_projected_bt_path, img2_matchmapped_bt_path, img1_projected_bt_path, matchmap=True)

    img1_projected = GeoDataset(img1_projected_path)
    img2_projected = GeoDataset(img2_matchmapped_path)

    arr1 = img1_projected.read_array()
    arr2 = img2_projected.read_array()

    arr1[arr1 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr2[arr2 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr1[arr1 == -32768.] = 0
    arr2[arr2 == -32768.] = 0

    arr1 = np.ma.MaskedArray(arr1, arr1 == 0)
    arr2 = np.ma.MaskedArray(arr2, arr2 == 0)

    img1_b9_overlap = np.ma.MaskedArray(arr1.data, arr1.mask | arr2.mask)
    img2_b9_overlap = np.ma.MaskedArray(arr2.data, arr1.mask | arr2.mask)
    rad_diff = np.ma.MaskedArray(img1_b9_overlap.data-img2_b9_overlap.data, arr1.mask | arr2.mask)

    img1rads = img1_b9_overlap[~img1_b9_overlap.mask]
    img2rads = img2_b9_overlap[~img2_b9_overlap.mask]

    img1_b9_overlap.data[img1_b9_overlap.mask] = 0
    img2_b9_overlap.data[img2_b9_overlap.mask] = 0
    rad_diff.data[rad_diff.mask] = 0

    # logger.info('Writing {}'.format(img1_b9_path))
    # ds = utils.array2raster(img1_projected_path, img1_b9_overlap, img1_b9_path)
    # del ds
    #
    # logger.info('Writing {}'.format(img2_b9_path))
    # ds = utils.array2raster(img2_projected_path, img2_b9_overlap, img2_b9_path)
    # del ds

    logger.info('Writing {}'.format(rad_diff_image))
    ds = utils.array2raster(img1_projected_path, rad_diff, rad_diff_image)
    del ds

    img1_bt_projected = GeoDataset(img1_projected_bt_path)
    img2_bt_projected = GeoDataset(img2_matchmapped_bt_path)

    arr1 = img1_bt_projected.read_array()
    arr2 = img2_bt_projected.read_array()
    arr1[arr1 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr2[arr2 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr1[arr1 == -32768.] = 0
    arr2[arr2 == -32768.] = 0

    arr1 = np.ma.MaskedArray(arr1, arr1 == 0)
    arr2 = np.ma.MaskedArray(arr2, arr2 == 0)

    img1_b9_bt_overlap = np.ma.MaskedArray(arr1.data, arr1.mask | arr2.mask)
    img2_b9_bt_overlap = np.ma.MaskedArray(arr2.data, arr1.mask | arr2.mask)
    bt_diff = np.ma.MaskedArray(img1_b9_bt_overlap.data-img2_b9_bt_overlap.data, arr1.mask | arr2.mask)

    img1bt = img1_b9_bt_overlap[~img1_b9_bt_overlap.mask]
    img2bt = img2_b9_bt_overlap[~img2_b9_bt_overlap.mask]

    img1_b9_bt_overlap.data[img1_b9_bt_overlap.mask] = 0
    img2_b9_bt_overlap.data[img2_b9_bt_overlap.mask] = 0
    bt_diff.data[bt_diff.mask] = 0

    # logger.info('Writing {}'.format(img1_b9_bt_path))
    # ds = utils.array2raster(img1_projected_bt_path, img1_b9_bt_overlap, img1_b9_bt_path)
    # del ds
    #
    # logger.info('Writing {}'.format(img2_b9_bt_path))
    # ds = utils.array2raster(img2_projected_bt_path, img2_b9_bt_overlap, img2_b9_bt_path)
    # del ds

    logger.info('Writing {}'.format(bt_diff_image))
    ds = utils.array2raster(img1_projected_bt_path, bt_diff, bt_diff_image)
    del ds

    img1_campt = pvl.loads(campt(from_=img1_path))['GroundPoint']
    img2_campt = pvl.loads(campt(from_=img2_path))['GroundPoint']

    img1_date = GeoDataset(img1_path).metadata['IsisCube']['Instrument']['StartTime']
    img2_date = GeoDataset(img2_path).metadata['IsisCube']['Instrument']['StartTime']

    metadata = {}
    metadata['img1'] = {}
    metadata['img1']['rad'] = stats(img1rads)
    metadata['img1']['tb'] = stats(img1bt)
    metadata['img1']['emission_angle'] = img1_campt['Emission'].value
    metadata['img1']['incidence_angle'] = img1_campt['Incidence'].value
    metadata['img1']['solar_lon'] = img1_campt['SolarLongitude'].value
    metadata['img1']['date'] = {
        'year' : img1_date.year,
        'month' : img1_date.month,
        'day': img1_date.day
    }

    metadata['img2'] = {}
    metadata['img2']['rad'] = stats(img2rads)
    metadata['img2']['tb'] = stats(img2bt)
    metadata['img2']['emission_angle'] = img2_campt['Emission'].value
    metadata['img2']['incidence_angle'] = img2_campt['Incidence'].value
    metadata['img2']['solar_lon'] = img2_campt['SolarLongitude'].value
    metadata['img2']['date'] = {
        'year' : img2_date.year,
        'month' : img2_date.month,
        'day': img2_date.day
    }

    metadata['diff'] = {}
    metadata['diff']['rad'] = stats(rad_diff)
    metadata['diff']['tb'] = stats(bt_diff)
    metadata['diff']['date(days)'] = (img1_date - img2_date).days
    metadata['id1'] = id1
    metadata['id2'] = id2

    metadata['plots'] = {}
    metadata['plots']['rad_hist'] = os.path.join(plot_path, 'rad_hist.png')
    metadata['plots']['tb_hist'] = os.path.join(plot_path, 'tb_hist.png')
    metadata['plots']['diff_hist'] = os.path.join(plot_path, 'diff_hist.png')
    metadata['plots']['match_plot'] = autocnet_plot_path

    if not used_smithed:
        metadata['plots']['matching_plot'] = autocnet_plot_path
        metadata['bundle'] = {}
        for f in glob(os.path.join(bundle_result_path, '*')):
            metadata['bundle'][os.path.basename(os.path.splitext(f)[0])] = f


        try:
            df = pd.read_csv(metadata['bundle']['residuals'], header=1)
        except (KeyError, FileNotFoundError):
            df = pd.read_csv(metadata['bundle']['_residuals'], header=1)

        metadata['bundle']['residual_stats'] = stats(np.asarray(df['residual.1'][1:], dtype=float))

    utils.print_dict(metadata)

    plt.figure(figsize=(25,10))
    bins = sns.distplot(img1rads[~img1rads.mask], kde=False, norm_hist=False, label='{} {}'.format(id1, os.path.basename(img1_b9_path)))
    bins = sns.distplot(img2rads[~img2rads.mask], kde=False, norm_hist=False, label='{} {}'.format(id2,os.path.basename(img2_b9_path)))
    bins.set(xlabel='radiance', ylabel='counts')
    plt.legend()
    plt.savefig(metadata['plots']['rad_hist'])
    plt.close()

    plt.figure(figsize=(25,10))
    bins = sns.distplot(img1bt[~img1bt.mask], kde=False, norm_hist=False, label='{} {}'.format(id1, os.path.basename(img1_b9_bt_path)))
    bins = sns.distplot(img2bt[~img2bt.mask], kde=False, norm_hist=False, label='{} {}'.format(id2, os.path.basename(img2_b9_bt_path)))
    bins.set(xlabel='Brightness Temp', ylabel='counts')
    plt.legend()
    plt.savefig(metadata['plots']['tb_hist'])
    plt.close()

    plt.figure(figsize=(25,10))
    diffplot = sns.distplot(rad_diff[~rad_diff.mask],  kde=False)
    diffplot.set(xlabel='Delta Radiance', ylabel='counts')
    plt.savefig(metadata['plots']['diff_hist'])
    plt.close()

    metadata_path = os.path.join(pair_dir, 'metadata.json')
    json.dump(metadata,open(metadata_path, 'w+'), default=utils.date_converter)

    index_path = os.path.join(pair_dir, 'index.json')

    index = {}
    print(GeoDataset(img1_cropped_path).footprint.ExportToWkt())
    print(GeoDataset(img2_cropped_path).footprint.ExportToWkt())

    index['overlap_geom'] = overlap_geom.ExportToWkt()
    index['img1_geom'] =  img1_footprint.ExportToWkt()
    index['img2_geom'] =  img2_footprint.ExportToWkt()
    index['id'] = '{}_{}'.format(id1, id2)
    json.dump(index, open(index_path, 'w+'))

    utils.print_dict(index)
    logger.info("Complete")
Example No. 10
def point_info(cube_path, x, y, point_type, allow_outside=False):
    """
    Use Isis's campt to get image/ground point info from an image

    Parameters
    ----------
    cube_path : str
                path to the input cube

    x : float
        point in the x direction. Either a sample or a longitude value
        depending on the point_type flag

    y : float
        point in the y direction. Either a line or a latitude value
        depending on the point_type flag

    point_type : str
                 Options: {"image", "ground"}
                 Pass "image" if  x,y are in image space (sample, line) or
                 "ground" if in ground space (longitude, lattiude)

    Returns
    -------
    : dict
      Dictionary (or list of dictionaries) of campt/mappt results
    """
    point_type = point_type.lower()

    if point_type not in {"image", "ground"}:
        raise Exception(f'{point_type} is not a valid point type, valid types are ["image", "ground"]')

    if isinstance(x, Number) and isinstance(y, Number):
        x, y = [x], [y]

    if point_type == "image":
        # convert to ISIS pixels
        x = np.add(x, .5)
        y = np.add(y, .5)

    if pvl.load(cube_path).get("IsisCube").get("Mapping"):
      pvlres = []
      # We have a projected image
      for x,y in zip(x,y):
        try:
          if point_type.lower() == "ground":
            pvlres.append(isis.mappt(from_=cube_path, longitude=x, latitude=y, allowoutside=allow_outside, coordsys="UNIVERSAL", type_=point_type))
          elif point_type.lower() == "image":
            pvlres.append(isis.mappt(from_=cube_path, sample=x, line=y, allowoutside=allow_outside, type_=point_type))
        except ProcessError as e:
          print(f"CAMPT call failed, image: {cube_path}\n{e.stderr}")
          return
      dictres = [dict(pvl.loads(res)["Results"]) for res  in pvlres]
      if len(dictres) == 1:
        dictres = dictres[0]

    else:
      with tempfile.NamedTemporaryFile("w+") as f:
         # ISIS's campt wants points in a file, so write to a temp file
         if point_type == "ground":
            # campt uses lat, lon for ground but sample, line for image.
            # So swap x,y for ground-to-image calls
            x,y = y,x


         f.write("\n".join(["{}, {}".format(xval,yval) for xval,yval in zip(x, y)]))
         f.flush()
         try:
            pvlres = isis.campt(from_=cube_path, coordlist=f.name, allowoutside=allow_outside, usecoordlist=True, coordtype=point_type)
         except ProcessError as e:
            warn(f"CAMPT call failed, image: {cube_path}\n{e.stderr}")
            return

         pvlres = pvl.loads(pvlres)
         dictres = []
         if len(x) > 1 and len(y) > 1:
            for r in pvlres:
                if r['GroundPoint']['Error'] is not None:
                    raise ProcessError(returncode=1, cmd=['pysis.campt()'], stdout=r, stderr=r['GroundPoint']['Error'])
                else:
                    # convert all pixels to PLIO pixels from ISIS
                    r[1]["Sample"] -= .5
                    r[1]["Line"] -= .5
                    dictres.append(dict(r[1]))
         else:
            if pvlres['GroundPoint']['Error'] is not None:
                raise ProcessError(returncode=1, cmd=['pysis.campt()'], stdout=pvlres, stderr=pvlres['GroundPoint']['Error'])
            else:
                pvlres["GroundPoint"]["Sample"] -= .5
                pvlres["GroundPoint"]["Line"] -= .5
                dictres = dict(pvlres["GroundPoint"])
    return dictres
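A hedged usage sketch for a single image-space point, assuming a hypothetical unprojected cube so the campt branch runs; the returned dictionary carries the usual GroundPoint keys.

# Sketch only: hypothetical cube path and pixel coordinates.
res = point_info('observation.cub', 10.0, 20.0, 'image')
if res:
    print(res['PlanetocentricLatitude'], res['PositiveEast360Longitude'])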
Example No. 11
def point_info(cube_path, x, y, point_type, allow_outside=False):
    """
    Use Isis's campt to get image/ground point info from an image

    Parameters
    ----------
    cube_path : str
                path to the input cube

    x : float
        point in the x direction. Either a sample or a longitude value
        depending on the point_type flag

    y : float
        point in the y direction. Either a line or a latitude value
        depending on the point_type flag

    point_type : str
                 Options: {"image", "ground"}
                 Pass "image" if  x,y are in image space (sample, line) or
                 "ground" if in ground space (longitude, lattiude)

    Returns
    -------
    : PvlObject
      Pvl object containing campt returns
    """
    point_type = point_type.lower()

    if point_type not in {"image", "ground"}:
        raise Exception(
            f'{point_type} is not a valid point type, valid types are ["image", "ground"]'
        )

    if isinstance(x, Number) and isinstance(y, Number):
        x, y = [x], [y]

    with tempfile.NamedTemporaryFile("w+") as f:
        # ISIS wants points in a file, so write to a temp file
        if point_type == "ground":
            # campt uses lat, lon for ground but sample, line for image.
            # So swap x,y for ground-to-image calls
            x, y = y, x
        elif point_type == "image":
            # convert to ISIS pixels
            x = np.add(x, .5)
            y = np.add(y, .5)

        f.write("\n".join(
            ["{}, {}".format(xval, yval) for xval, yval in zip(x, y)]))
        f.flush()

        with tempfile.NamedTemporaryFile("r+") as campt_output:
            try:
                isis.campt(from_=cube_path,
                           coordlist=f.name,
                           allowoutside=allow_outside,
                           usecoordlist=True,
                           coordtype=point_type,
                           to=campt_output.name)
            except ProcessError as e:
                warn(f"CAMPT call failed, image: {cube_path}\n{e.stderr}")
                return

            pvlres = pvl.load(campt_output.name)

        if len(x) > 1 and len(y) > 1:
            for r in pvlres:
                # convert all pixels to PLIO pixels from ISIS
                r[1]["Sample"] -= .5
                r[1]["Line"] -= .5
        else:
            pvlres["GroundPoint"]["Sample"] -= .5
            pvlres["GroundPoint"]["Line"] -= .5
    return pvlres
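Both point_info variants shift coordinates by half a pixel on the way into and out of ISIS; the comments above call the unshifted values PLIO pixels. The conversion in isolation:

import numpy as np

# Sketch only: the half-pixel shift applied before campt and undone afterwards.
plio_samples = np.array([10.0, 250.0])
isis_samples = np.add(plio_samples, .5)   # into ISIS pixel space
plio_again = isis_samples - .5            # back to PLIO pixel space
print(isis_samples, plio_again)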
Example No. 12
def preprocess(thm_id,
               outdir,
               day=True,
               validate=False,
               projected_images=True,
               map_file=config.themis.map_file,
               originals=True,
               gtiffs=False,
               meta=True,
               index=True):
    '''
    Downloads Themis file by ID and runs it through spice init and
    footprint init.
    '''
    original = os.path.join(outdir, 'original')
    images = os.path.join(outdir, 'images')

    ogcube = os.path.join(original, 'l1.cub')
    projcube = os.path.join(original, 'l2.cub')
    metafile = os.path.join(outdir, 'meta.json')
    indexfile = os.path.join(outdir, 'index.json')

    os.makedirs(original, exist_ok=True)
    os.makedirs(images, exist_ok=True)

    kerns = get_controlled_kernels(thm_id)

    if os.path.exists(outdir) and os.path.exists(original) and os.path.exists(
            metafile) and os.path.exists(indexfile):
        logger.info("File {} Exists, skipping redownload.".format(outdir))
        return bool(kerns)

    if originals:
        if day:
            out, err = run_davinci('thm_pre_process.dv',
                                   infile=thm_id,
                                   outfile=ogcube)
        else:
            out, err = run_davinci('thm_pre_process_night.dv',
                                   infile=thm_id,
                                   outfile=ogcube)

        if validate:
            try:
                init(ogcube, additional_kernels=kerns)
                label = pvl.loads(campt(from_=ogcube))
            except ProcessError as e:
                logger.info('campt Error')
                logger.info('file: {}'.format(ogcube))
                logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
                logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))

            incidence_angle = label['GroundPoint']['Incidence'].value

            if day and incidence_angle > 90:
                logger.info(
                    "incidence angle suggests night, but {} was proccessed for day, reprocessing"
                    .format(thm_id))
                out, err = run_davinci('thm_pre_process_night.dv',
                                       infile=thm_id,
                                       outfile=ogcube)
                init(ogcube, additional_kernels=kerns)
            elif not day and incidence_angle <= 90:
                logger.info(
                    "incidence angle suggests day, but {} was proccessed for night, reprocessing"
                    .format(thm_id))
                out, err = run_davinci('thm_pre_process.dv',
                                       infile=thm_id,
                                       outfile=ogcube)
                init(ogcube, additional_kernels=kerns)

        else:
            init(ogcube, additional_kernels=kerns)

        if projected_images:
            project(ogcube, projcube, map_file)

    img = GeoDataset(ogcube)
    nbands = img.nbands

    if meta:
        meta = json.loads(
            json.dumps(img.metadata,
                       default=lambda o: str(o)
                       if isinstance(o, datetime) else o))
        try:
            meta['map_file'] = str(pvl.load(map_file))
        except Exception as e:
            logger.error("Failed to load map file {}:\n{}".format(map_file, e))
            raise Exception("Invalid map file.")

        if kerns:
            logger.info('Used Controlled Kernels')
            meta['used_control_kernels'] = True

        json.dump(meta, open(metafile, 'w+'))

    if index:
        date = img.metadata['IsisCube']['Instrument']['StartTime']
        index_meta = {}
        index_meta['geom'] = img.footprint.ExportToWkt()
        index_meta['id'] = thm_id
        index_meta['time'] = {}
        index_meta['time']['year'] = date.year
        index_meta['time']['month'] = date.month
        index_meta['time']['day'] = date.day
        index_meta['time']['hour'] = date.hour
        json.dump(index_meta, open(indexfile, 'w+'))

    del img

    if gtiffs:
        for band in range(1, nbands + 1):
            tiffpath = os.path.join(images, 'b{}.tiff'.format(band))
            logger.info('Writing: {}'.format(tiffpath))
            gdal.Translate(tiffpath, ogcube, bandList=[band], format='GTiff')

    return bool(kerns)
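A hedged usage sketch; the THEMIS ID and output directory are hypothetical, following the directory layout themis_pairs builds above.

# Sketch only: hypothetical ID and output directory.
used_smithed = preprocess('I01001002',
                          '/data/THEMIS/I/0/I01001002',
                          day=True,
                          validate=True,
                          gtiffs=False)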
Example No. 13
def match_pair(img1_path, img2_path, figpath=None):
    src_points = point_grid(GeoDataset(img1_path), step=50)
    with open('temp.txt', 'w+') as f:
        f.write('\n'.join('{}, {}'.format(int(x), int(y)) for x, y in src_points))

    label = pvl.loads(
        campt(from_=img1_path, coordlist='temp.txt', coordtype='image'))
    points = []
    for group in label:
        try:
            lat = group[1]['PlanetocentricLatitude'].value
            lon = group[1]['PositiveEast360Longitude'].value
            points.append([lat, lon])
        except Exception as e:
            continue

    logger.info(
        "{} points from image1 successfully reprojected to image2, rejected {}"
        .format(str(len(points)), str(len(src_points) - len(points))))

    if len(points) == 0:
        raise Exception("No valid points were found for pair {} {}".format(
            img1_path, img2_path))

    with open('temp.txt', 'w+') as f:
        f.write('\n'.join('{}, {}'.format(x, y) for x, y in points))

    img2label = pvl.loads(
        campt(from_=img2_path,
              coordlist='temp.txt',
              coordtype='ground',
              allowoutside=False))
    dst_lookup = {}
    for i, group in enumerate(img2label):
        if not group[1]['Error']:
            line = group[1]['Line']
            sample = group[1]['Sample']
            dst_lookup[i] = [sample, line]

    filelist = [img1_path, img2_path]
    cg = CandidateGraph.from_filelist(filelist)

    edge = cg[0][1]['data']
    img1 = GeoDataset(img1_path)
    img2 = GeoDataset(img2_path)

    src_keypoints = pd.DataFrame(data=src_points, columns=['x', 'y'])
    src_keypoints['response'] = 0
    src_keypoints['angle'] = 0
    src_keypoints['octave'] = 0
    src_keypoints['layer'] = 0
    edge.source._keypoints = src_keypoints

    results = []
    dst_keypoints = []
    dst_index = 0
    distances = []

    arr1 = img1.read_array()
    arr2 = img2.read_array()
    del img1
    del img2

    for keypoint in edge.source.keypoints.iterrows():
        index, row = keypoint

        sx, sy = row['x'], row['y']

        try:
            dx, dy = dst_lookup[index]
        except KeyError:
            continue

        try:
            ret = refine_subpixel(sx,
                                  sy,
                                  dx,
                                  dy,
                                  arr1,
                                  arr2,
                                  size=50,
                                  reduction=10,
                                  convergence_threshold=1)
        except Exception as ex:
            continue

        if ret is not None:
            x, y, metrics = ret
        else:
            continue

        dist = np.linalg.norm([x - dx, y - dy])
        results.append([0, index, 1, dst_index, dist])
        dst_keypoints.append([x, y, 0, 0, 0, 0, 0])
        dst_index += 1

    matches = pd.DataFrame(data=results,
                           columns=[
                               'source_image', 'source_idx',
                               'destination_image', 'destination_idx',
                               'distance'
                           ])

    if matches.empty:
        logger.error(
            "After matching points, matches dataframe returned empty.")

    dst_keypoints = pd.DataFrame(
        data=dst_keypoints,
        columns=['x', 'y', 'response', 'size', 'angle', 'octave', 'layer'])
    edge.destination._keypoints = dst_keypoints

    edge._matches = matches
    edge.compute_fundamental_matrix()
    distance_check(edge, clean_keys=['fundamental'])

    if figpath:
        plt.figure(figsize=(10, 25))
        cg[0][1]['data'].plot(clean_keys=['fundamental', 'distance'],
                              nodata=-32768.0)
        plt.savefig(figpath)
        plt.close()
    return cg
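A hedged usage sketch mirroring how themis_pairs drives this function above; paths are hypothetical and figpath is optional.

# Sketch only: hypothetical cube paths.
cg = match_pair('source.l1.cub', 'destination.l1.cub', figpath='autocnet.tif')
cg.generate_control_network()
cg.to_isis('cnet')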