Example 1
    def __init__(self, file_name):

        GeoDataset.__init__(self, file_name)
        HCube.__init__(self)

        self.derived_funcs = {}

        if libpyhat_enabled:
            self.derived_funcs = get_derived_funcs(crism)
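
The snippet guards optional libpyhat support behind a module-level flag. A common pattern for setting such a flag (a sketch under assumptions, not this project's confirmed import block):

    # Sketch: guarded optional import; the exact module path is an assumption.
    try:
        from libpyhat.derived import crism
        libpyhat_enabled = True
    except ImportError:
        libpyhat_enabled = False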
Example 2
    @classmethod
    def from_filelist(cls, filelist, basepath=None):
        """
        Instantiate the class using a filelist as a python list.
        An adjacency structure is calculated using the lat/lon information in the
        input images. Currently only images with this information are supported.

        Parameters
        ----------
        filelist : list
                   A list containing the files (with full paths) to construct an adjacency graph from

        basepath : str
                   Optional directory path prepended to each entry of filelist

        Returns
        -------
        : object
          A Network graph object
        """
        if isinstance(filelist, str):
            filelist = io_utils.file_to_list(filelist)
        # TODO: Reject unsupported file formats + work with more file formats
        if basepath:
            datasets = [
                GeoDataset(os.path.join(basepath, f)) for f in filelist
            ]
        else:
            datasets = [GeoDataset(f) for f in filelist]

        # This is brute force for now, could swap to an RTree at some point.
        adjacency_dict = {}
        valid_datasets = []

        for i in datasets:
            adjacency_dict[i.file_name] = []

            fp = i.footprint
            if fp and fp.IsValid():
                valid_datasets.append(i)
            else:
                warnings.warn(
                    'Missing or invalid geospatial data for {}'.format(
                        i.base_name))

        # Grab the footprints and test for intersection
        # Use combinations so each unordered pair is tested exactly once;
        # both directions are appended below, avoiding duplicate entries.
        for i, j in itertools.combinations(valid_datasets, 2):
            i_fp = i.footprint
            j_fp = j.footprint

            try:
                if i_fp.Intersects(j_fp):
                    adjacency_dict[i.file_name].append(j.file_name)
                    adjacency_dict[j.file_name].append(i.file_name)
            except Exception:
                warnings.warn(
                    'Failed to calculate intersection between {} and {}'.
                    format(i, j))
        return cls.from_adjacency(adjacency_dict)
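
A short usage sketch; the file names are hypothetical, and Example 18 below shows the same CandidateGraph.from_filelist call against real paths:

    # Build an adjacency graph from overlapping images (paths hypothetical)
    cg = CandidateGraph.from_filelist(['/data/a.cub', '/data/b.cub'])

    # A plain-text list of paths also works: a str argument is routed
    # through io_utils.file_to_list before the datasets are opened.
    cg = CandidateGraph.from_filelist('/data/filelist.txt')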
Example 3
def master_isvalid(file):
    """
    Check that a MASTER HDF4 file is complete: it must contain all 17
    subdatasets, and its calibrated image and pixel lat/lon grids must
    hold at least one non-fill value each.
    """
    if len(gdal.Open(file).GetSubDatasets()) != 17:
        return False

    # Subdataset indices: 37 = calibrated image, 30/31 = pixel lat/lon grids
    calibrated_image = GeoDataset('HDF4_SDS:UNKNOWN:"{}":37'.format(file))
    lats = GeoDataset('HDF4_SDS:UNKNOWN:"{}":30'.format(file))
    lons = GeoDataset('HDF4_SDS:UNKNOWN:"{}":31'.format(file))

    res = []
    for ds in [calibrated_image, lats, lons]:
        arr = ds.read_array()
        # Compare against an array of the band's fill value: the band is
        # valid only if at least one pixel differs from the fill.
        test = np.empty(arr.shape)
        test[:] = ds.metadata['_FillValue']
        res.append(not (test == arr).all())
    return all(res)
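
A sketch of how this validator might gate ingestion (directory hypothetical):

    from glob import glob

    # keep only granules whose image and lat/lon grids contain real data
    valid = [f for f in glob('/data/master/*.hdf') if master_isvalid(f)]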
Example 4
def geolocate(infile,
              outfile,
              lats,
              lons,
              dstSRS="EPSG:4326",
              format="GTiff",
              woptions={},
              toptions={}):
    """
    """
    image = gdal.Open(infile, gdal.GA_Update)
    geoloc = {
        'X_DATASET': lons,
        'X_BAND': '1',
        'Y_DATASET': lats,
        'Y_BAND': '1',
        'PIXEL_OFFSET': '0',
        'LINE_OFFSET': '0',
        'PIXEL_STEP': '1',
        'LINE_STEP': '1'
    }

    image.SetMetadata(geoloc, 'GEOLOCATION')
    # explicitly close the image so the new metadata is flushed
    del image
    # geoloc=True tells the warper to use the GEOLOCATION metadata domain
    gdal.Warp(outfile, infile, format=format, dstSRS=dstSRS, geoloc=True)
    return GeoDataset(outfile)
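
Examples 5 and 12 call this with per-pixel latitude/longitude rasters extracted from the same HDF. A condensed sketch (paths hypothetical):

    geo = geolocate('CalibratedData.vrt', 'CalibratedData_Geo.tif',
                    'PixelLatitude.vrt', 'PixelLongitude.vrt')
    print(geo.footprint.ExportToWkt())  # returned handle is georeferenced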
Example 5
    def master(root, masterhdf):
        fd = GeoDataset(masterhdf)
        date = datetime.strptime(fd.metadata['CompletionDate'],
                                 "%d-%b-%Y %H:%M:%S")
        line = fd.metadata['FlightLineNumber']
        daytime_flag = fd.metadata['day_night_flag']
        ID = fd.metadata['producer_granule_id'].split('.')[0]

        path = os.path.join(root, 'MASTER', str(date.year), str(line),
                            daytime_flag, ID)
        newhdf = os.path.join(path, os.path.basename(masterhdf))

        # Try making the directory
        try:
            os.makedirs(path)
        except OSError as exc:
            if exc.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                raise

        # copy original hdf
        try:
            copyfile(masterhdf, newhdf)
        except shutil.SameFileError:
            pass

        # explicitly close file descriptor
        del fd

        fd = GeoDataset(newhdf)
        subdatasets = fd.dataset.GetSubDatasets()

        for dataset in subdatasets:
            ofilename = '{}.vrt'.format(dataset[1].split()[1])
            ofilename_abspath = os.path.join(path, ofilename)
            gdal.Translate(ofilename_abspath, dataset[0], format="VRT")

        # create geo corrected calibrated image
        lats = os.path.join(path, 'PixelLatitude.vrt')
        lons = os.path.join(path, 'PixelLongitude.vrt')
        image = os.path.join(path, 'CalibratedData.vrt')
        geocorrected_image = os.path.join(path, 'CalibratedData_Geo.tif')
        geolocate(image, geocorrected_image, lats, lons)
Example 6
    def geodata(self):
        if not getattr(self, '_geodata', None) and self['image_path'] is not None:
            try:
                self._geodata = GeoDataset(self['image_path'])
                return self._geodata
            except Exception:
                # fall back to the node id when the image cannot be opened
                return self['node_id']
        if hasattr(self, '_geodata'):
            return self._geodata
        else:
            return None
Example 7
    def open_browse(self, extension='.jpg'):
        """
        Attempt to open the browse image corresponding to the spc file

        Parameters
        ----------
        extension : str
                    The file type extension to be added to the base name
                    of the spc file.

        Returns
        -------
        None
          The opened browse image is stored on self.browse.
        """
        path, ext = os.path.splitext(self.input_data)
        self.browse = GeoDataset(path + extension)
Example 8
def project(img, to, mapfile, matchmap=False):
    """
    Project an ISIS cube using the projection defined in mapfile: cubes
    that already carry a Mapping group are run through map2map, all
    others through cam2map.
    """
    params = {'from_': img, 'map': mapfile, 'to': to, 'matchmap': matchmap}

    if GeoDataset(img).metadata['IsisCube'].get('Mapping', False):
        try:
            params['interp'] = 'NEARESTNEIGHBOR'
            logger.info('Running map2map on {} with params {}'.format(
                img, params))
            map2map(**params)
        except ProcessError as e:
            logger.info('map2map Error')
            logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
            logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))
    else:
        try:
            logger.info('Running cam2map on {}'.format(img))
            cam2map(**params)
        except ProcessError as e:
            logger.info('cam2map Error')
            logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
            logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))
Example 9
def normalize_image_res(image1,
                        image2,
                        image2out,
                        image1out,
                        out_type='ISIS3',
                        nodata=-32768.0):
    """
    Resample two overlapping images to a common resolution (the coarser
    of the two pixel widths), then crop/pad both to a common pixel window
    and write them out with the given nodata value.
    """
    width = max(image1.pixel_width, image2.pixel_width)

    f1 = gdal.Warp('/vsimem/temp1.out',
                   image1.file_name,
                   targetAlignedPixels=True,
                   xRes=width,
                   yRes=width,
                   format=out_type)
    f2 = gdal.Warp('/vsimem/temp2.out',
                   image2.file_name,
                   targetAlignedPixels=True,
                   xRes=width,
                   yRes=width,
                   format=out_type)
    del (f1, f2)

    temp1 = GeoDataset('/vsimem/temp1.out')
    temp2 = GeoDataset('/vsimem/temp2.out')
    minx = 0
    miny = 0
    maxx = max(temp1.read_array().shape[1], temp2.read_array().shape[1])
    maxy = max(temp1.read_array().shape[0], temp2.read_array().shape[0])

    fp1 = gdal.Translate(image1out,
                         '/vsimem/temp1.out',
                         srcWin=[minx, miny, maxx - minx, maxy - miny],
                         noData=nodata)
    fp2 = gdal.Translate(image2out,
                         '/vsimem/temp2.out',
                         srcWin=[minx, miny, maxx - minx, maxy - miny],
                         noData=nodata)
    del (fp1, fp2)
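
A usage sketch (paths hypothetical); note the argument order, with image2out before image1out:

    img1 = GeoDataset('left.cub')
    img2 = GeoDataset('right.cub')
    normalize_image_res(img1, img2, 'right.norm.cub', 'left.norm.cub')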
Example 10
    # Parse args and grab the file handle to the image
    kwargs = parse_args()
    input_file = kwargs.pop('input_file', None)

    #TODO: Tons of logic in here to get extracted

    config = AutoCNet_Config()
    db_uri = 'postgresql://{}:{}@{}:{}/{}'.format(config.database_username,
                                                  config.database_password,
                                                  config.database_host,
                                                  config.database_port,
                                                  config.database_name)

    ds = GeoDataset(input_file)

    # Create a camera model for the image
    # NOTE: any camera supplied via the CLI is discarded; a camera model
    # is always created from the dataset itself.
    camera = kwargs.pop('camera')
    camera = create_camera(ds)

    # Extract the correspondences
    extractor = kwargs.pop('extractor')
    maxsize = kwargs.pop('maxsize')
    keypoints, descriptors = extract(ds, extractor, maxsize)

    # Setup defaults for the footprints
    footprint_latlon = None
    footprint_bodyfixed = None
Example 11
def master_to_sql(directories, engine):
    """
    """
    metarecs = []
    imagerecs = []

    if isinstance(directories, str):
        directories = [directories]

    for directory in directories:
        ID = os.path.basename(directory)
        hdfs = os.path.join(directory, '{}.tif'.format(ID))
        files = glob(os.path.join(directory, '*.tif'))
        files = sorted(files)
        filecolumns = ['id', 'time', 'geom', 'original'] + [
            os.path.basename(os.path.splitext(file)[0]).lower()
            for file in files
        ]

        try:
            # open any file to get metadata
            ds = GeoDataset(directory + '/CalibratedData_Geo.tif')
            meta = ds.metadata
            meta['id'] = ID

            # array formatting for postgres
            meta['scale_factor'] = '{' + meta['scale_factor'] + '}'

            # snapshot the keys; mutating a dict while iterating its live
            # view is unsafe
            for key in list(meta.keys()):
                val = meta.pop(key)
                meta[key.lower()] = val

            del ds

            date = datetime.strptime(meta['completiondate'],
                                     "%d-%b-%Y %H:%M:%S")

            ll = float(meta['lon_ll']), float(meta['lat_ll'])
            lr = float(meta['lon_lr']), float(meta['lat_lr'])
            ul = float(meta['lon_ul']), float(meta['lat_ul'])
            ur = float(meta['lon_ur']), float(meta['lat_ur'])

            footprint = WKTElement(Polygon([ll, ul, ur, lr]), srid=4326)

            images_data = [ID, date, footprint, hdfs] + files
        except Exception as e:
            print(e)
            continue
        metarecs.append(meta)
        imagerecs.append(images_data)

    metadf = pd.DataFrame(metarecs)
    imagedf = gpd.GeoDataFrame(imagerecs, columns=filecolumns)

    imagedf.to_sql('images',
                   engine,
                   schema='master',
                   if_exists='append',
                   index=False,
                   dtype={'geom': Geometry('POLYGON', srid=4326)})
    metadf.to_sql('image_attributes',
                  engine,
                  schema='master',
                  if_exists='append',
                  index=False)
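
A usage sketch assuming a standard SQLAlchemy engine; the connection string and directory are hypothetical:

    from sqlalchemy import create_engine

    engine = create_engine('postgresql://user:password@localhost:5432/bayleef')
    master_to_sql(['/data/MASTER/2016/5/Day/MASTER_granule_id'], engine)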
Example 12
def master(root, masterhdf):
    """
    Ingestion function for master. Master is unique in that it cannot be pulled
    by an API, therefore original MASTER files have to exist locally.

    Parameters
    ----------
    root : str
           path to the bayleef data directory.

    masterhdf : str
                path to a MASTER .HDF file
    """
    fd = GeoDataset(masterhdf)
    meta = fd.metadata

    meta['bayleef_name'] = 'MASTER'

    # snapshot the keys; mutating a dict while iterating its live view is unsafe
    for key in list(meta.keys()):
        val = meta.pop(key)
        meta[key.lower()] = val

    date = datetime.strptime(meta['completiondate'], "%d-%b-%Y %H:%M:%S")
    line = meta['flightlinenumber']
    daytime_flag = meta['day_night_flag']
    ID = meta['producer_granule_id'].split('.')[0]

    basepath = path.join(root, 'MASTER', str(date.year), str(line),
                         daytime_flag, ID)
    ogdatapath = path.join(basepath, 'original')
    imagedatapath = path.join(basepath, 'imagedata')

    newhdf = path.join(ogdatapath, path.basename(masterhdf))

    # Try making the directory
    try:
        os.makedirs(basepath)
        os.makedirs(ogdatapath)
        os.makedirs(imagedatapath)
    except OSError as exc:
        if exc.errno == errno.EEXIST and path.isdir(basepath):
            pass
        else:
            raise

    # copy original hdf
    try:
        copyfile(masterhdf, newhdf)
    except shutil.SameFileError:
        pass

    # explicitly close file descriptor
    del fd

    fd = GeoDataset(newhdf)
    subdatasets = fd.dataset.GetSubDatasets()

    for dataset in subdatasets:
        ofilename = '{}.tif'.format(dataset[1].split()[1])
        ofilename_abspath = path.join(imagedatapath, ofilename)
        gdal.Translate(ofilename_abspath, dataset[0], format="GTiff")

    # create geo corrected calibrated image
    lats = path.join(imagedatapath, 'PixelLatitude.tif')
    lons = path.join(imagedatapath, 'PixelLongitude.tif')
    image = path.join(imagedatapath, 'CalibratedData.tif')

    geocorrected_image = path.join(imagedatapath, 'CalibratedData_Geo.tif')
    utils.geolocate(image, geocorrected_image, lats, lons)

    # MASTER has 50 bands; extract them as separate files
    for i in range(1, 51):
        gdal.Translate(path.join(imagedatapath, 'b{}.tif'.format(i)),
                       path.join(imagedatapath, 'CalibratedData_Geo.tif'),
                       bandList=[i])

    index_meta = {}

    ll = float(meta['lon_ll']), float(meta['lat_ll'])
    lr = float(meta['lon_lr']), float(meta['lat_lr'])
    ul = float(meta['lon_ul']), float(meta['lat_ul'])
    ur = float(meta['lon_ur']), float(meta['lat_ur'])

    index_meta['geom'] = Polygon([ll, ul, ur, lr]).wkt
    index_meta['id'] = ID

    index_meta['time'] = {}
    index_meta['time']['year'] = date.year
    index_meta['time']['month'] = date.month
    index_meta['time']['day'] = date.day
    index_meta['time']['hour'] = date.hour

    index_json_file = path.join(basepath, 'index.json')
    meta_json_file = path.join(basepath, 'meta.json')
    json.dump(index_meta, open(index_json_file, 'w+'))
    json.dump(meta, open(meta_json_file, 'w+'))
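
An invocation sketch (paths hypothetical); the ingested products land under root/MASTER/<year>/<flight line>/<day_night_flag>/<granule id>/:

    master('/data/bayleef', '/downloads/MASTER_granule.hdf')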
Example 13
    def __init__(self, file_name):

        GeoDataset.__init__(self, file_name)
        HCube.__init__(self)

        self.derived_funcs = get_derived_funcs(m3)
Example 14
    try:
        jigsaw(**bundle_parameters)
    except ProcessError as e:
        print('Jigsaw Error')
        print("STDOUT:", e.stdout.decode('utf-8'))
        print("STDERR:", e.stderr.decode('utf-8'))

    df = pd.read_csv('residuals.csv', header=1)

    residuals = df.iloc[1:]['residual.1'].astype(float)

    residual_min = min(residuals)
    residual_max = max(residuals)

    # NOTE: the summary statistics below are computed but discarded;
    # print() the result to inspect them
    df['residual'].iloc[1:].astype(float).describe()

    img1fh = GeoDataset(img1)
    img2fh = GeoDataset(img2)

    # need to clean up time stuff
    print("Image 1 =======")
    label = img1fh.metadata
    starttime1 = label['IsisCube']['Instrument']['StartTime']
    endtime1 = label['IsisCube']['Instrument']['StopTime']
    print(marstime.getMTfromTime(starttime1 + (endtime1 - starttime1) / 2)[0])
    print(label[4][1]['SolarLongitude'])
    print('LOCAL TIME', marstime.getLTfromTime(starttime1, 0))

    print()
    print("Image 2 =======")
    label = img2fh.metadata
    starttime2 = label['IsisCube']['Instrument']['StartTime']
Example 15
def themis_pairs(root, id1, id2):
    def stats(arr):
        return {
            'mean': float(np.mean(arr)),
            'min': float(np.min(arr)),
            'max': float(np.max(arr)),
            'stddev': float(np.std(arr))
        }

    # enforce ID1 < ID2
    id1, id2 = sorted([id1, id2])

    data_dir = config.data  # NOTE: the root argument is currently unused
    themis_dir1 = os.path.join(data_dir, "THEMIS", id1[0], id1[1], id1)
    themis_dir2 = os.path.join(data_dir, "THEMIS", id2[0], id2[1], id2)
    pair_dir = os.path.join(data_dir, "THEMIS_PAIRS", id1, id2)

    map_file = config.themis.map_file
    if not os.path.isfile(map_file):
        raise Exception("{} does not exist.".format(map_file))


    pair_original_path = os.path.join(pair_dir, 'original')
    pair_images_path = os.path.join(pair_dir, 'imagedata')
    bundle_result_path = os.path.join(pair_dir, 'bundle')
    plot_path = os.path.join(pair_dir, 'plots')

    img1_path = os.path.join(themis_dir1, 'original', 'l1.cub')
    img2_path = os.path.join(themis_dir2, 'original', 'l1.cub')

    img1_cropped_path = os.path.join(pair_original_path, 'source.l1.cub')
    img2_cropped_path = os.path.join(pair_original_path, 'destination.l1.cub')

    img1_projected_path = os.path.join(pair_original_path, 'source.l2.cub')
    img2_projected_path = os.path.join(pair_original_path, 'destination.l2.cub')

    img1_projected_bt_path = os.path.join(pair_original_path, 'source.l2.bt.cub')
    img2_projected_bt_path = os.path.join(pair_original_path, 'destination.l2.bt.cub')

    img2_matchmapped_path = os.path.join(pair_original_path, 'destination.l2.mm.cub')
    img2_matchmapped_bt_path = os.path.join(pair_original_path, 'destination.l2.bt.mm.cub')

    cubelis = os.path.join(pair_dir, 'filelist.txt')

    cnet_path = os.path.join(bundle_result_path, 'cnet.net')

    autocnet_plot_path = os.path.join(plot_path, 'autocnet.tif')
    histogram_plot_path = os.path.join(plot_path, 'hist.tif')
    overlap_plot_path = os.path.join(plot_path, 'overlap.tif')

    img1_b9_path = os.path.join(pair_images_path, 'source.b9.tif')
    img2_b9_path = os.path.join(pair_images_path, 'destination.b9.tif')
    img1_b9_bt_path = os.path.join(pair_images_path, 'source.b9.bt.tif')
    img2_b9_bt_path = os.path.join(pair_images_path, 'destination.b9.bt.tif')

    rad_diff_image = os.path.join(pair_images_path, 'rad_diff.tif')
    bt_diff_image = os.path.join(pair_images_path, 'bt_diff.tif')

    logger.info('Making directories {} and {}'.format(pair_original_path, pair_images_path))
    os.makedirs(pair_original_path, exist_ok=True)
    os.makedirs(pair_images_path, exist_ok=True)
    os.makedirs(bundle_result_path, exist_ok=True)
    os.makedirs(plot_path, exist_ok=True)

    # write out cubelist
    with open(cubelis, 'w') as f:
        f.write(img1_cropped_path + '\n')
        f.write(img2_cropped_path + '\n')

    logger.info('IDs: {} {}'.format(id1, id2))
    logger.info('DATA DIR: {}'.format(data_dir))
    logger.info('IMAGE 1 PATH: {}'.format(img1_path))
    logger.info('IMAGE 2 PATH: {}'.format(img2_path))
    logger.info('PAIR OG DIR: {}'.format(pair_original_path))
    logger.info('PAIR IMAGE PATH: {}'.format(pair_images_path))
    logger.info('PAIR DIR: {}'.format(pair_dir))

    img1_smithed = utils.preprocess(id1, themis_dir1, day=True, validate=True, gtiffs=False, projected_images=False)
    img2_smithed = utils.preprocess(id2, themis_dir2, day=True, validate=True, gtiffs=False, projected_images=False)

    img1_fh = GeoDataset(img1_path)
    img2_fh = GeoDataset(img2_path)

    # GetEnvelope() returns (minX, maxX, minY, maxY); the footprint's X
    # axis is assumed here to carry latitude
    minLat, maxLat, _, _ = img1_fh.footprint.Intersection(img2_fh.footprint).GetEnvelope()
    utils.thm_crop(img1_path, img1_cropped_path, minLat, maxLat)
    utils.thm_crop(img2_path, img2_cropped_path, minLat, maxLat)

    del (img1_fh, img2_fh)

    used_smithed = True
    if not (img1_smithed and img2_smithed):
        logger.info("No smithed kernels found, matching with Autocnet.")
        used_smithed = False
        cg = utils.match_pair(img1_cropped_path, img2_cropped_path, figpath=autocnet_plot_path)
        cg.generate_control_network()
        cg.to_isis(os.path.splitext(cnet_path)[0])

        bundle_parameters = config.themis.bundle_parameters
        bundle_parameters['from_'] = cubelis
        bundle_parameters['cnet'] = cnet_path
        bundle_parameters['onet'] = cnet_path
        bundle_parameters['file_prefix'] = bundle_result_path + '/'
        logger.info("Running Jigsaw, parameters:\n")
        utils.print_dict(bundle_parameters)
        try:
            jigsaw(**bundle_parameters)
        except ProcessError as e:
            logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
            logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))
            raise Exception("Jigsaw Error")

    try:
        map_pvl = pvl.load(map_file)
    except Exception as e:
        logger.error("Error loading mapfile {}:\n{}".format(map_file, e))
        # map_pvl is undefined past this point, so re-raise
        raise

    logger.info('Projecting {} to {} with map file:\n {}'.format(img1_cropped_path, img1_projected_path, map_pvl))
    utils.project(img1_cropped_path, img1_projected_path, map_file)

    logger.info('Projecting {} to {} with map file:\n {}'.format(img2_cropped_path, img2_projected_path, map_pvl))
    utils.project(img2_cropped_path, img2_projected_path, map_file)

    img1_footprint = GeoDataset(img1_projected_path).footprint
    img2_footprint = GeoDataset(img2_projected_path).footprint
    overlap_geom = img2_footprint.Intersection(img1_footprint)

    try:
        out1, err1 = utils.run_davinci('thm_tb.dv', img1_projected_path, img1_projected_bt_path)
        out2, err2 = utils.run_davinci('thm_tb.dv', img2_projected_path, img2_projected_bt_path)
    except Exception as e:
        logger.error(e)

    try:
        out1, err1 = utils.run_davinci('thm_post_process.dv', img1_projected_bt_path, img1_projected_bt_path)
        out2, err2 = utils.run_davinci('thm_post_process.dv', img2_projected_bt_path, img2_projected_bt_path)

        out1, err1 = utils.run_davinci('thm_bandselect.dv', img1_projected_bt_path, img1_projected_bt_path, args=['band=9'])
        out2, err2 = utils.run_davinci('thm_bandselect.dv', img2_projected_bt_path, img2_projected_bt_path, args=['band=9'])
    except Exception as e:
        logger.error(e)

    try:
        out1, err1 = utils.run_davinci('thm_post_process.dv', img1_projected_path, img1_projected_path)
        out2, err2 = utils.run_davinci('thm_post_process.dv', img2_projected_path, img2_projected_path)

        out1, err1 = utils.run_davinci('thm_bandselect.dv', img1_projected_path, img1_projected_path, args=['band=9'])
        out2, err2 = utils.run_davinci('thm_bandselect.dv', img2_projected_path, img2_projected_path, args=['band=9'])
    except Exception as e:
        logger.error(e)


    footprintinit(from_=img2_projected_bt_path)
    footprintinit(from_=img2_projected_path)

    logger.info('Creating matchmapped cubes')
    utils.project(img2_projected_path, img2_matchmapped_path, img1_projected_path, matchmap=True)
    utils.project(img2_projected_bt_path, img2_matchmapped_bt_path, img1_projected_bt_path, matchmap=True)

    img1_projected = GeoDataset(img1_projected_path)
    img2_projected = GeoDataset(img2_matchmapped_path)

    arr1 = img1_projected.read_array()
    arr2 = img2_projected.read_array()

    arr1[arr1 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr2[arr2 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr1[arr1 == -32768.] = 0
    arr2[arr2 == -32768.] = 0

    arr1 = np.ma.MaskedArray(arr1, arr1 == 0)
    arr2 = np.ma.MaskedArray(arr2, arr2 == 0)

    img1_b9_overlap = np.ma.MaskedArray(arr1.data, arr1.mask | arr2.mask)
    img2_b9_overlap = np.ma.MaskedArray(arr2.data, arr1.mask | arr2.mask)
    rad_diff = np.ma.MaskedArray(img1_b9_overlap.data-img2_b9_overlap.data, arr1.mask | arr2.mask)

    img1rads = img1_b9_overlap[~img1_b9_overlap.mask]
    img2rads = img2_b9_overlap[~img2_b9_overlap.mask]

    img1_b9_overlap.data[img1_b9_overlap.mask] = 0
    img2_b9_overlap.data[img2_b9_overlap.mask] = 0
    rad_diff.data[rad_diff.mask] = 0

    # logger.info('Writing {}'.format(img1_b9_path))
    # ds = utils.array2raster(img1_projected_path, img1_b9_overlap, img1_b9_path)
    # del ds
    #
    # logger.info('Writing {}'.format(img2_b9_path))
    # ds = utils.array2raster(img2_projected_path, img2_b9_overlap, img2_b9_path)
    # del ds

    logger.info('Writing {}'.format(rad_diff_image))
    ds = utils.array2raster(img1_projected_path, rad_diff, rad_diff_image)
    del ds

    img1_bt_projected = GeoDataset(img1_projected_bt_path)
    img2_bt_projected = GeoDataset(img2_matchmapped_bt_path)

    arr1 = img1_bt_projected.read_array()
    arr2 = img2_bt_projected.read_array()
    arr1[arr1 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr2[arr2 == pysis.specialpixels.SPECIAL_PIXELS['Real']['Null']] = 0
    arr1[arr1 == -32768.] = 0
    arr2[arr2 == -32768.] = 0

    arr1 = np.ma.MaskedArray(arr1, arr1 == 0)
    arr2 = np.ma.MaskedArray(arr2, arr2 == 0)

    img1_b9_bt_overlap = np.ma.MaskedArray(arr1.data, arr1.mask | arr2.mask)
    img2_b9_bt_overlap = np.ma.MaskedArray(arr2.data, arr1.mask | arr2.mask)
    bt_diff = np.ma.MaskedArray(img1_b9_bt_overlap.data-img2_b9_bt_overlap.data, arr1.mask | arr2.mask)

    img1bt = img1_b9_bt_overlap[~img1_b9_bt_overlap.mask]
    img2bt = img2_b9_bt_overlap[~img2_b9_bt_overlap.mask]

    img1_b9_bt_overlap.data[img1_b9_bt_overlap.mask] = 0
    img2_b9_bt_overlap.data[img2_b9_bt_overlap.mask] = 0
    bt_diff.data[bt_diff.mask] = 0

    # logger.info('Writing {}'.format(img1_b9_bt_path))
    # ds = utils.array2raster(img1_projected_bt_path, img1_b9_bt_overlap, img1_b9_bt_path)
    # del ds
    #
    # logger.info('Writing {}'.format(img2_b9_bt_path))
    # ds = utils.array2raster(img2_projected_bt_path, img2_b9_bt_overlap, img2_b9_bt_path)
    # del ds

    logger.info('Writing {}'.format(bt_diff_image))
    ds = utils.array2raster(img1_projected_bt_path, bt_diff, bt_diff_image)
    del ds

    img1_campt = pvl.loads(campt(from_=img1_path))['GroundPoint']
    img2_campt = pvl.loads(campt(from_=img2_path))['GroundPoint']

    img1_date = GeoDataset(img1_path).metadata['IsisCube']['Instrument']['StartTime']
    img2_date = GeoDataset(img2_path).metadata['IsisCube']['Instrument']['StartTime']

    metadata = {}
    metadata['img1'] = {}
    metadata['img1']['rad'] = stats(img1rads)
    metadata['img1']['tb'] = stats(img1bt)
    metadata['img1']['emission_angle'] = img1_campt['Emission'].value
    metadata['img1']['incidence_angle'] = img1_campt['Incidence'].value
    metadata['img1']['solar_lon'] = img1_campt['SolarLongitude'].value
    metadata['img1']['date'] = {
        'year' : img1_date.year,
        'month' : img1_date.month,
        'day': img1_date.day
    }

    metadata['img2'] = {}
    metadata['img2']['rad'] = stats(img2rads)
    metadata['img2']['tb'] = stats(img2bt)
    metadata['img2']['emission_angle'] = img2_campt['Emission'].value
    metadata['img2']['incidence_angle'] = img2_campt['Incidence'].value
    metadata['img2']['solar_lon'] = img2_campt['SolarLongitude'].value
    metadata['img2']['date'] = {
        'year' : img2_date.year,
        'month' : img2_date.month,
        'day': img2_date.day
    }

    metadata['diff'] = {}
    metadata['diff']['rad'] = stats(rad_diff)
    metadata['diff']['tb'] = stats(bt_diff)
    metadata['diff']['date(days)'] = (img1_date - img2_date).days
    metadata['id1'] = id1
    metadata['id2'] = id2

    metadata['plots'] = {}
    metadata['plots']['rad_hist'] = os.path.join(plot_path, 'rad_hist.png')
    metadata['plots']['tb_hist'] = os.path.join(plot_path, 'tb_hist.png')
    metadata['plots']['diff_hist'] = os.path.join(plot_path, 'diff_hist.png')
    metadata['plots']['match_plot'] = autocnet_plot_path

    if not used_smithed:
        metadata['plots']['matching_plot'] = autocnet_plot_path
        metadata['bundle'] = {}
        for f in glob(os.path.join(bundle_result_path, '*')):
            metadata['bundle'][os.path.basename(os.path.splitext(f)[0])] = f


        try:
            df = pd.read_csv(metadata['bundle']['residuals'], header=1)
        except KeyError:
            df = pd.read_csv(metadata['bundle']['_residuals'], header=1)

        metadata['bundle']['residual_stats'] = stats(np.asarray(df['residual.1'][1:], dtype=float))

    utils.print_dict(metadata)

    plt.figure(figsize=(25,10))
    bins = sns.distplot(img1rads[~img1rads.mask], kde=False, norm_hist=False, label='{} {}'.format(id1, os.path.basename(img1_b9_path)))
    bins = sns.distplot(img2rads[~img2rads.mask], kde=False, norm_hist=False, label='{} {}'.format(id2,os.path.basename(img2_b9_path)))
    bins.set(xlabel='radiance', ylabel='counts')
    plt.legend()
    plt.savefig(metadata['plots']['rad_hist'])
    plt.close()

    plt.figure(figsize=(25,10))
    bins = sns.distplot(img1bt[~img1bt.mask], kde=False, norm_hist=False, label='{} {}'.format(id1, os.path.basename(img1_b9_bt_path)))
    bins = sns.distplot(img2bt[~img2bt.mask], kde=False, norm_hist=False, label='{} {}'.format(id2, os.path.basename(img2_b9_bt_path)))
    bins.set(xlabel='Brightness Temp', ylabel='counts')
    plt.legend()
    plt.savefig(metadata['plots']['tb_hist'])
    plt.close()

    plt.figure(figsize=(25,10))
    diffplot = sns.distplot(rad_diff[~rad_diff.mask],  kde=False)
    diffplot.set(xlabel='Delta Radiance', ylabel='counts')
    plt.savefig(metadata['plots']['diff_hist'])
    plt.close()

    metadata_path = os.path.join(pair_dir, 'metadata.json')
    json.dump(metadata, open(metadata_path, 'w+'), default=utils.date_converter)

    index_path = os.path.join(pair_dir, 'index.json')

    index = {}
    print(GeoDataset(img1_cropped_path).footprint.ExportToWkt())
    print(GeoDataset(img2_cropped_path).footprint.ExportToWkt())

    index['overlap_geom'] = overlap_geom.ExportToWkt()
    index['img1_geom'] = img1_footprint.ExportToWkt()
    index['img2_geom'] = img2_footprint.ExportToWkt()
    index['id'] = '{}_{}'.format(id1, id2)
    json.dump(index, open(index_path, 'w+'))

    utils.print_dict(index)
    logger.info("Complete")
Example 16
def array2raster(rasterfn, array, newRasterfn):
    """
    Writes an array to a GeoDataset using another dataset as reference. Borrowed
    from: https://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html

    Parameters
    ----------
    rasterfn : str, GeoDataset
               Dataset or path to the dataset to use as a reference. Geotransform
               and spatial reference information is copied into the new image.

    array : np.array
            Array to write

    newRasterfn : str
                  Filename for new raster image

    Returns
    -------
    : GeoDataset
      File handle for the new raster file

    """
    naxis = len(array.shape)
    assert naxis in (2, 3)

    if naxis == 2:
        # expand to a trailing third (band) dimension
        array = array[:, :, None]

    nbands = array.shape[2]

    if isinstance(rasterfn, GeoDataset):
        rasterfn = rasterfn.file_name

    raster = gdal.Open(rasterfn)
    geotransform = raster.GetGeoTransform()
    originX = geotransform[0]
    originY = geotransform[3]
    pixelWidth = geotransform[1]
    pixelHeight = geotransform[5]
    cols = array.shape[1]
    rows = array.shape[0]

    driver = gdal.GetDriverByName('GTiff')
    outRaster = driver.Create(newRasterfn, cols, rows, nbands,
                              gdal.GDT_Float32)
    outRaster.SetGeoTransform(
        (originX, pixelWidth, 0, originY, 0, pixelHeight))

    for band in range(1, nbands + 1):
        outband = outRaster.GetRasterBand(band)
        # Bands use indexing starting at 1
        outband.WriteArray(array[:, :, band - 1])
        outband.FlushCache()

    outRasterSRS = osr.SpatialReference()
    outRasterSRS.ImportFromWkt(raster.GetProjectionRef())
    outRaster.SetProjection(outRasterSRS.ExportToWkt())
    outRaster = None
    return GeoDataset(newRasterfn)
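
A usage sketch (names hypothetical): write a derived array on top of an existing image's georeferencing, as Example 15 does for its difference images:

    ref = GeoDataset('CalibratedData_Geo.tif')
    diff = ref.read_array() - 100.0  # any array with matching rows/cols
    out = array2raster(ref, diff, 'diff.tif')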
Example 17
def preprocess(thm_id,
               outdir,
               day=True,
               validate=False,
               projected_images=True,
               map_file=config.themis.map_file,
               originals=True,
               gtiffs=False,
               meta=True,
               index=True):
    '''
    Download a THEMIS image by ID and run it through spiceinit and
    footprintinit, optionally writing projected cubes, GTiffs, and
    meta/index JSON files.
    '''
    original = os.path.join(outdir, 'original')
    images = os.path.join(outdir, 'images')

    ogcube = os.path.join(original, 'l1.cub')
    projcube = os.path.join(original, 'l2.cub')
    metafile = os.path.join(outdir, 'meta.json')
    indexfile = os.path.join(outdir, 'index.json')

    os.makedirs(original, exist_ok=True)
    os.makedirs(images, exist_ok=True)

    kerns = get_controlled_kernels(thm_id)

    # outdir and original were just created above, so this gate effectively
    # tests for the meta and index files
    if os.path.exists(outdir) and os.path.exists(original) and os.path.exists(
            metafile) and os.path.exists(indexfile):
        logger.info("File {} Exists, skipping redownload.".format(outdir))
        return bool(kerns)

    if originals:
        if day:
            out, err = run_davinci('thm_pre_process.dv',
                                   infile=thm_id,
                                   outfile=ogcube)
        else:
            out, err = run_davinci('thm_pre_process_night.dv',
                                   infile=thm_id,
                                   outfile=ogcube)

        if validate:
            try:
                init(ogcube, additional_kernels=kerns)
                label = pvl.loads(campt(from_=ogcube))
            except ProcessError as e:
                logger.info('campt Error')
                logger.info('file: {}'.format(ogcube))
                logger.error("STDOUT: {}".format(e.stdout.decode('utf-8')))
                logger.error("STDERR: {}".format(e.stderr.decode('utf-8')))
                # label is undefined past this point, so re-raise
                raise

            incidence_angle = label['GroundPoint']['Incidence'].value

            if day and incidence_angle > 90:
                logger.info(
                    "incidence angle suggests night, but {} was proccessed for day, reprocessing"
                    .format(thm_id))
                out, err = run_davinci('thm_pre_process_night.dv',
                                       infile=thm_id,
                                       outfile=ogcube)
                init(ogcube, additional_kernels=kerns)
            elif not day and incidence_angle <= 90:
                logger.info(
                    "incidence angle suggests day, but {} was proccessed for night, reprocessing"
                    .format(thm_id))
                out, err = run_davinci('thm_pre_process.dv',
                                       infile=thm_id,
                                       outfile=ogcube)
                init(ogcube, additional_kernels=kerns)

        else:
            init(ogcube, additional_kernels=kerns)

        if projected_images:
            project(ogcube, projcube, map_file)

    img = GeoDataset(ogcube)

    if meta:
        meta = json.loads(
            json.dumps(img.metadata,
                       default=lambda o: str(o)
                       if isinstance(o, datetime) else o))
        try:
            meta['map_file'] = str(pvl.load(map_file))
        except Exception as e:
            logger.error("Failed to load map file {}:\n{}".format(map_file, e))
            raise Exception("Invalid map file.")

        if kerns:
            logger.info('Used Controlled Kernels')
            meta['used_control_kernels'] = True

        # dump after the kernel flag is set so the flag lands in the file
        json.dump(meta, open(metafile, 'w+'))

    if index:
        date = img.metadata['IsisCube']['Instrument']['StartTime']
        index_meta = {}
        index_meta['geom'] = img.footprint.ExportToWkt()
        index_meta['id'] = thm_id
        index_meta['time'] = {}
        index_meta['time']['year'] = date.year
        index_meta['time']['month'] = date.month
        index_meta['time']['day'] = date.day
        index_meta['time']['hour'] = date.hour
        json.dump(index_meta, open(indexfile, 'w+'))

    # grab the band count before closing the dataset; the gtiffs loop
    # below needs it even when index is False
    nbands = img.nbands
    del img

    if gtiffs:
        for band in range(1, nbands + 1):
            tiffpath = os.path.join(images, 'b{}.tiff'.format(band))
            logger.info('Writing: {}'.format(tiffpath))
            gdal.Translate(tiffpath, ogcube, bandList=[band], format='GTiff')

    return bool(kerns)
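
Usage, matching the calls in Example 15 (ID and output directory hypothetical); the return value reports whether controlled kernels were found:

    smithed = preprocess('I01001001', '/data/THEMIS/I/0/I01001001',
                         day=True, validate=True,
                         gtiffs=False, projected_images=False)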
Example 18
def match_pair(img1_path, img2_path, figpath=None):
    src_points = point_grid(GeoDataset(img1_path), step=50)
    with open('temp.txt', 'w+') as f:
        f.write('\n'.join('{}, {}'.format(int(x), int(y)) for x, y in src_points))

    label = pvl.loads(
        campt(from_=img1_path, coordlist='temp.txt', coordtype='image'))
    points = []
    for group in label:
        try:
            lat = group[1]['PlanetocentricLatitude'].value
            lon = group[1]['PositiveEast360Longitude'].value
            points.append([lat, lon])
        except Exception:
            continue

    logger.info(
        "{} points from image1 successfully reprojected to image2, rejected {}"
        .format(str(len(points)), str(len(src_points) - len(points))))

    if len(points) == 0:
        raise Exception("No valid points were found for pair {} {}".format(
            img1_path, img2_path))

    with open('temp.txt', 'w+') as f:
        f.write('\n'.join('{}, {}'.format(x, y) for x, y in points))

    img2label = pvl.loads(
        campt(from_=img2_path,
              coordlist='temp.txt',
              coordtype='ground',
              allowoutside=False))
    dst_lookup = {}
    for i, group in enumerate(img2label):
        if not group[1]['Error']:
            line = group[1]['Line']
            sample = group[1]['Sample']
            dst_lookup[i] = [sample, line]

    filelist = [img1_path, img2_path]
    cg = CandidateGraph.from_filelist(filelist)

    edge = cg[0][1]['data']
    img1 = GeoDataset(img1_path)
    img2 = GeoDataset(img2_path)

    src_keypoints = pd.DataFrame(data=src_points, columns=['x', 'y'])
    src_keypoints['response'] = 0
    src_keypoints['angle'] = 0
    src_keypoints['octave'] = 0
    src_keypoints['layer'] = 0
    edge.source._keypoints = src_keypoints

    results = []
    dst_keypoints = []
    dst_index = 0
    distances = []

    arr1 = img1.read_array()
    arr2 = img2.read_array()
    del img1
    del img2

    for keypoint in edge.source.keypoints.iterrows():
        index, row = keypoint

        sx, sy = row['x'], row['y']

        try:
            dx, dy = dst_lookup[index]
        except KeyError:
            continue

        try:
            ret = refine_subpixel(sx,
                                  sy,
                                  dx,
                                  dy,
                                  arr1,
                                  arr2,
                                  size=50,
                                  reduction=10,
                                  convergence_threshold=1)
        except Exception:
            continue

        if ret is not None:
            x, y, metrics = ret
        else:
            continue

        dist = np.linalg.norm([x - dx, y - dy])
        results.append([0, index, 1, dst_index, dist])
        dst_keypoints.append([x, y, 0, 0, 0, 0, 0])
        dst_index += 1

    matches = pd.DataFrame(data=results,
                           columns=[
                               'source_image', 'source_idx',
                               'destination_image', 'destination_idx',
                               'distance'
                           ])

    if matches.empty:
        logger.error("Matching produced an empty matches dataframe.")

    dst_keypoints = pd.DataFrame(
        data=dst_keypoints,
        columns=['x', 'y', 'response', 'size', 'angle', 'octave', 'layer'])
    edge.destination._keypoints = dst_keypoints

    edge._matches = matches
    edge.compute_fundamental_matrix()
    distance_check(edge, clean_keys=['fundamental'])

    if figpath:
        plt.figure(figsize=(10, 25))
        cg[0][1]['data'].plot(clean_keys=['fundamental', 'distance'],
                              nodata=-32768.0)
        plt.savefig(figpath)
        plt.close()
    return cg
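
Usage, as in Example 15 (paths hypothetical): the returned CandidateGraph can emit an ISIS control network for jigsaw:

    cg = match_pair('source.l1.cub', 'destination.l1.cub', figpath='match.tif')
    cg.generate_control_network()
    cg.to_isis('cnet')  # writes cnet.net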