Example #1
File: landsat.py Project: danlopez00/gips
    def _readraw(self):
        """ Read in Landsat bands using original tar.gz file """
        start = datetime.now()
        # make sure metadata is loaded
        self.meta()

        if settings().REPOS[self.Repository.name]['extract']:
            # Extract all files
            datafiles = self.assets[''].extract(self.metadata['filenames'])
        else:
            # Use tar.gz directly using GDAL's virtual filesystem
            datafiles = [os.path.join('/vsitar/' + self.assets[''].filename, f) for f in self.metadata['filenames']]

        image = gippy.GeoImage(datafiles)
        image.SetNoData(0)

        # TODO - set appropriate metadata
        #for key,val in meta.iteritems():
        #    image.SetMeta(key,str(val))

        # Geometry used for calculating incident irradiance
        colors = self.assets['']._sensors[self.sensor_set[0]]['colors']
        for bi in range(0, len(self.metadata['filenames'])):
            image.SetBandName(colors[bi], bi + 1)
            # need to do this or can we index correctly?
            band = image[bi]
            band.SetGain(self.metadata['gain'][bi])
            band.SetOffset(self.metadata['offset'][bi])
            dynrange = self.metadata['dynrange'][bi]
            band.SetDynamicRange(dynrange[0], dynrange[1])
            image[bi] = band

        VerboseOut('%s: read in %s' % (image.Basename(), datetime.now() - start), 2)
        return image
Example #2
File: core.py Project: aniucd/gips
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'), date.strftime('%j'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)

            for f in ftp.nlst('*'):
                VerboseOut("Downloading %s" % f, 2)
                ftp.retrbinary('RETR %s' % f, open(os.path.join(cls.Repository.path('stage'), f), "wb").write)
            ftp.close()
        except Exception, e:
            VerboseOut(traceback.format_exc(), 4)
            raise Exception("Error downloading: %s" % e)
Example #3
def t_settings_user(mocker):
    """gips.settings should load user settings first."""
    mocker.patch.object(utils.os.path, 'isfile').return_value = True
    mocker.patch.object(utils.os.path, 'expanduser').return_value = 'whatever'
    m_load_source = mocker.patch.object(utils.imp, 'load_source')
    fake_settings = m_load_source.return_value # a MagicMock
    assert utils.settings() == fake_settings
Example #4
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'), date.strftime('%j'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)

            for f in ftp.nlst('*'):
                VerboseOut("Downloading %s" % f, 2)
                ftp.retrbinary('RETR %s' % f, open(os.path.join(cls.Repository.path('stage'), f), "wb").write)
            ftp.close()
        except Exception, e:
            VerboseOut(traceback.format_exc(), 4)
            raise Exception("Error downloading: %s" % e)
Example #5
def t_settings_global(mocker):
    """gips.settings should fall back on gips.settings when user settings fail."""
    # force into the second clause
    mocker.patch.object(utils.os.path, 'isfile').return_value = False
    # fake out `import gips.settings` with mocks and trickery:
    fake_gips = mocker.Mock()
    fake_settings = fake_gips.settings
    sys.modules['gips'] = fake_gips
    sys.modules['gips.settings'] = fake_settings
    assert utils.settings() == fake_settings
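
Taken together, the two tests above pin down the control flow of gips.utils.settings(): try a per-user settings file first, then fall back on the packaged gips.settings module when that file is absent. A minimal sketch of such a loader follows; the user settings path and the module name passed to imp.load_source are assumptions, not values taken from the GIPS source.

import imp
import os

def settings():
    """Load GIPS settings, preferring a per-user settings file."""
    # first clause: a user settings file, if present, wins
    user_cfg = os.path.expanduser('~/.gips/settings.py')  # assumed path
    if os.path.isfile(user_cfg):
        return imp.load_source('settings', user_cfg)  # assumed module name
    # second clause: fall back on the packaged settings module
    import gips.settings
    return gips.settings
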
Example #6
File: config.py Project: dchowdhury/gips
def main():
    import gips
    title = 'GIPS Configuration Utility (v%s)' % (version)

    try:
        parser = GIPSParser(description=title, datasources=False)
        subparser = parser.add_subparsers(dest='command')
        subparser.add_parser('print', help='Print current settings')
        p = subparser.add_parser(
            'env', help='Configure GIPS repositories in this environment')
        p.add_argument('-r',
                       '--repos',
                       help='Top level directory for repositories',
                       default='/data/repos')
        p.add_argument(
            '-e',
            '--email',
            help='Set email address (used for anonymous FTP sources)',
            default='')
        p = subparser.add_parser(
            'user',
            help=
            'Configure GIPS repositories for this user (for per user customizations)'
        )
        #p.add_argument('-e', '--email', help='Set email address (used for anonymous FTP sources)')
        #h = 'Install full configuration file without inheriting from environment settings'
        #p.add_argument('-f', '--full', help=h, default=False, action='store_true')
        args = parser.parse_args()
        print title

        if args.command == 'print':
            try:
                from gips.utils import settings
                s = settings()
                for v in dir(s):
                    if not v.startswith('__') and v != 'gips':
                        print
                        print v
                        exec('pprint.pprint(s.%s)' % v)
            except Exception, e:
                #print traceback.format_exc()
                print 'Unable to access settings. Run `gips_config env`'

        elif args.command == 'env':
            try:
                cfgfile = create_environment_settings(args.repos,
                                                      email=args.email)
                print 'Environment settings file: %s' % cfgfile
                print 'Creating repository directories'
                create_repos()
            except Exception, e:
                print traceback.format_exc()
                print 'Could not create environment settings: %s' % e
Example #7
File: landsat.py Project: demiurg/gips
    def _readqa(self):
        # make sure metadata is loaded
        if not hasattr(self, 'metadata'):
            self.meta()
        if settings().REPOS[self.Repository.name.lower()]['extract']:
            # Extract files
            qadatafile = self.assets['DN'].extract([self.metadata['qafilename']])
        else:
            # Use tar.gz directly using GDAL's virtual filesystem
            qadatafile = os.path.join('/vsitar/' + self.assets['DN'].filename, self.metadata['qafilename'])
        qaimg = gippy.GeoImage(qadatafile)
        return qaimg
Example #8
    def _readqa(self):
        # make sure metadata is loaded
        if not hasattr(self, 'metadata'):
            self.meta()
        if settings().REPOS[self.Repository.name.lower()]['extract']:
            # Extract files
            qadatafile = self.assets['DN'].extract(
                [self.metadata['qafilename']])
        else:
            # Use tar.gz directly using GDAL's virtual filesystem
            qadatafile = os.path.join('/vsitar/' + self.assets['DN'].filename,
                                      self.metadata['qafilename'])
        qaimg = gippy.GeoImage(qadatafile)
        return qaimg
Example #9
File: core.py Project: aniucd/gips
    def get_setting(cls, key):
        """ Get value from repo settings """
        dataclass = cls.__name__[:-10]
        r = settings().REPOS[dataclass]
        if key not in r.keys():
            # not in settings file, use defaults
            exec('import gips.data.%s as clsname' % dataclass)
            driverpath = os.path.dirname(clsname.__file__)
            if key == 'driver':
                return driverpath
            elif key == 'tiles':
                return os.path.join(driverpath, 'tiles.shp')
            else:
                raise Exception('%s is not a valid setting!' % key)
        else:
            return r[key]
Example #10
    def get_setting(cls, key):
        """ Get value from repo settings """
        dataclass = cls.__name__[:-10]
        r = settings().REPOS[dataclass]
        if key not in r.keys():
            # not in settings file, use defaults
            exec('import gips.data.%s as clsname' % dataclass)
            driverpath = os.path.dirname(clsname.__file__)
            if key == 'driver':
                return driverpath
            elif key == 'tiles':
                return os.path.join(driverpath, 'tiles.shp')
            else:
                raise Exception('%s is not a valid setting!' % key)
        else:
            return r[key]
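
The exec-based import in get_setting above could equally be written with importlib.import_module; the sketch below shows that substitution only and is not code from the GIPS repository.

import importlib
import os

def default_setting(dataclass, key):
    # import the driver module (e.g. gips.data.landsat) without exec
    driver = importlib.import_module('gips.data.%s' % dataclass)
    driverpath = os.path.dirname(driver.__file__)
    if key == 'driver':
        return driverpath
    elif key == 'tiles':
        return os.path.join(driverpath, 'tiles.shp')
    raise Exception('%s is not a valid setting!' % key)
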
Example #11
def main():
    title = Colors.BOLD + 'GIPS Image Statistics (v%s)' % __version__ + Colors.OFF

    parser0 = GIPSParser(datasources=False, description=title)
    parser0.add_projdir_parser()
    group = parser0.add_argument_group('masking options')
    args = parser0.parse_args()

    utils.gips_script_setup(stop_on_error=args.stop_on_error)
    print title

    # TODO - check that at least 1 of filemask or pmask is supplied
    header = ['date', 'band', 'min', 'max', 'mean', 'sd', 'skew', 'count']

    with utils.error_handler():
        for projdir in args.projdir:
            VerboseOut('Stats for Project directory: %s' % projdir, 1)
            inv = ProjectInventory(projdir, args.products)

            p_dates = {} # map each product to its list of valid dates
            for date in inv.dates:
                for p in inv.products(date):
                    p_dates.setdefault(p, []).append(date)
            p_dates = {p: sorted(dl) for p, dl in p_dates.items()}

            for p_type, valid_dates in p_dates.items():
                stats_fn = os.path.join(projdir, p_type + '_stats.txt')
                with open(stats_fn, 'w') as stats_fo:
                    sf = getattr(utils.settings(), 'STATS_FORMAT', {})
                    writer = csv.writer(stats_fo, **sf)
                    writer.writerow(header)

                    # print date, band description, and stats
                    for date in valid_dates:
                        img = inv[date].open(p_type)
                        date_str = date.strftime('%Y-%j')
                        utils.verbose_out('Computing stats for {} {}'.format(
                                p_type, date_str), 2)
                        for b in img:
                            stats = [str(s) for s in b.Stats()]
                            writer.writerow(
                                    [date_str, b.Description()] + stats)
                        img = None

    utils.gips_exit() # produce a summary error report then quit with a proper exit status
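
Note that the STATS_FORMAT value looked up above is unpacked directly into csv.writer, so any such entry in a settings file must be a dict of valid csv.writer keyword arguments. A hypothetical entry, with illustrative values only:

# hypothetical settings.py entry; keys are csv.writer keyword arguments
STATS_FORMAT = {
    'delimiter': ',',
    'lineterminator': '\n',
}
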
Example #12
    def _readraw(self):
        """ Read in Landsat bands using original tar.gz file """
        start = datetime.now()
        # make sure metadata is loaded
        if not hasattr(self, 'metadata'):
            self.meta()

        if settings().REPOS[self.Repository.name.lower()]['extract']:
            # Extract all files
            datafiles = self.assets['DN'].extract(self.metadata['filenames'])
        else:
            # Use tar.gz directly using GDAL's virtual filesystem
            datafiles = [
                os.path.join('/vsitar/' + self.assets['DN'].filename, f)
                for f in self.metadata['filenames']
            ]

        image = gippy.GeoImage(datafiles)
        image.SetNoData(0)

        # TODO - set appropriate metadata
        #for key,val in meta.iteritems():
        #    image.SetMeta(key,str(val))

        # Geometry used for calculating incident irradiance
        # colors = self.assets['DN']._sensors[self.sensor_set[0]]['colors']

        sensor = self.assets['DN'].sensor
        colors = self.assets['DN']._sensors[sensor]['colors']

        for bi in range(0, len(self.metadata['filenames'])):
            image.SetBandName(colors[bi], bi + 1)
            # need to do this or can we index correctly?
            band = image[bi]
            band.SetGain(self.metadata['gain'][bi])
            band.SetOffset(self.metadata['offset'][bi])
            dynrange = self.metadata['dynrange'][bi]
            band.SetDynamicRange(dynrange[0], dynrange[1])
            image[bi] = band

        VerboseOut(
            '%s: read in %s' % (image.Basename(), datetime.now() - start), 2)
        return image
Example #13
def main():
    import gips
    title = 'GIPS Configuration Utility (v%s)' % (version)

    parser = GIPSParser(description=title, datasources=False)
    subparser = parser.add_subparsers(dest='command')
    subparser.add_parser('print', help='Print current settings')
    p = subparser.add_parser('env', help='Configure GIPS repositories in this environment')
    p.add_argument('-r', '--repos', help='Top level directory for repositories', default='/data/repos')
    p.add_argument('-e', '--email', help='Set email address (used for anonymous FTP sources)', default='')
    p = subparser.add_parser('user', help='Configure GIPS repositories for this user (for per user customizations)')
    #p.add_argument('-e', '--email', help='Set email address (used for anonymous FTP sources)')
    #h = 'Install full configuration file without inheriting from environment settings'
    #p.add_argument('-f', '--full', help=h, default=False, action='store_true')
    args = parser.parse_args()
    print title

    if args.command == 'print':
        try:
            from gips.utils import settings
            s = settings()
            for v in dir(s):
                if not v.startswith('__') and v != 'gips':
                    print
                    print v
                    exec('pprint.pprint(s.%s)' % v)
        except Exception as e:
            # print traceback.format_exc()
            print 'Unable to access settings: {}'.format(e)
            sys.exit(1)

    elif args.command == 'env':
        try:
            cfgfile = create_environment_settings(args.repos, email=args.email)
            print 'Environment settings file: %s' % cfgfile
            print 'Creating repository directories'
            create_repos()
        except Exception, e:
            print traceback.format_exc()
            print 'Could not create environment settings: %s' % e
Example #14
File: prism.py Project: ircwaves/gips
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        if url.startswith('ftp://'):
            #drop ftp:// if given
            url = url[6:]
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)
            filenames = map(lambda x: x.split(' ')[-1], filenames)
            filenames = filter(
                lambda x: date.strftime('%Y%m%d') in x,
                filenames
            )
            if len(filenames) > 1:
                filenames = sorted(filenames, key=lambda x: prismAsset(x).ver_stab, reverse=True)
            filename = filenames[0]
            stagedir = tempfile.mkdtemp(
                prefix='prismDownloader',
                dir=cls.Repository.path('stage')
            )
            ofilename = os.path.join(stagedir, filename)
            VerboseOut("Downloading %s" % filename, 2)
            with open(ofilename, "wb") as ofile:
                ftp.retrbinary('RETR %s' % filename, ofile.write)
            ftp.close()
        except Exception, e:
            raise Exception("Error downloading: %s" % e)
Example #15
File: prism.py Project: ircwaves/gips
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" %
                            (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        if url.startswith('ftp://'):
            #drop ftp:// if given
            url = url[6:]
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)
            filenames = map(lambda x: x.split(' ')[-1], filenames)
            filenames = filter(lambda x: date.strftime('%Y%m%d') in x,
                               filenames)
            if len(filenames) > 1:
                filenames = sorted(filenames,
                                   key=lambda x: prismAsset(x).ver_stab,
                                   reverse=True)
            filename = filenames[0]
            stagedir = tempfile.mkdtemp(prefix='prismDownloader',
                                        dir=cls.Repository.path('stage'))
            ofilename = os.path.join(stagedir, filename)
            VerboseOut("Downloading %s" % filename, 2)
            with open(ofilename, "wb") as ofile:
                ftp.retrbinary('RETR %s' % filename, ofile.write)
            ftp.close()
        except Exception, e:
            raise Exception("Error downloading: %s" % e)
Example #16
File: landsat.py Project: dchowdhury/gips
class landsatData(Data):
    name = 'Landsat'
    version = '0.9.0'

    Asset = landsatAsset

    _prodpattern = '*.tif'
    # Group products belong to ('Standard' if not specified)
    _productgroups = {
        'Index':
        ['bi', 'evi', 'lswi', 'msavi2', 'ndsi', 'ndvi', 'ndwi', 'satvi'],
        'Tillage': ['ndti', 'crc', 'sti', 'isti'],
    }
    __toastring = 'toa: use top of the atmosphere reflectance'
    _products = {
        #'Standard':
        'rad': {
            'description': 'Surface-leaving radiance',
            'arguments': [__toastring]
        },
        'ref': {
            'description': 'Surface reflectance',
            'arguments': [__toastring]
        },
        'temp': {
            'description': 'Brightness (apparent) temperature',
            'toa': True
        },
        'acca': {
            'description':
            'Automated Cloud Cover Assessment',
            'arguments': [
                'X: erosion kernel diameter in pixels (default: 5)',
                'Y: dilation kernel diameter in pixels (default: 10)',
                'Z: cloud height in meters (default: 4000)'
            ],
            'nargs':
            '*',
            'toa':
            True
        },
        'fmask': {
            'description': 'Fmask cloud cover',
            'nargs': '*',
            'toa': True
        },
        'tcap': {
            'description': 'Tasseled cap transformation',
            'toa': True
        },
        'dn': {
            'description': 'Raw digital numbers',
            'toa': True
        },
        'volref': {
            'description':
            'Volumetric water reflectance - valid for water only',
            'arguments': [__toastring]
        },
        'wtemp': {
            'description':
            'Water temperature (atmospherically correct) - valid for water only',
            # It's not really TOA, but the product code will take care of atm correction itself
            'toa': True
        },
        #'Indices': {
        'bi': {
            'description': 'Brightness Index',
            'arguments': [__toastring]
        },
        'evi': {
            'description': 'Enhanced Vegetation Index',
            'arguments': [__toastring]
        },
        'lswi': {
            'description': 'Land Surface Water Index',
            'arguments': [__toastring]
        },
        'msavi2': {
            'description': 'Modified Soil-Adjusted Vegetation Index (revised)',
            'arguments': [__toastring]
        },
        'ndsi': {
            'description': 'Normalized Difference Snow Index',
            'arguments': [__toastring]
        },
        'ndvi': {
            'description': 'Normalized Difference Vegetation Index',
            'arguments': [__toastring]
        },
        'ndwi': {
            'description': 'Normalized Difference Water Index',
            'arguments': [__toastring]
        },
        'satvi': {
            'description': 'Soil-Adjusted Total Vegetation Index',
            'arguments': [__toastring]
        },
        #'Tillage Indices': {
        'ndti': {
            'description': 'Normalized Difference Tillage Index',
            'arguments': [__toastring]
        },
        'crc': {
            'description': 'Crop Residue Cover',
            'arguments': [__toastring]
        },
        'sti': {
            'description': 'Standard Tillage Index',
            'arguments': [__toastring]
        },
        'isti': {
            'description': 'Inverse Standard Tillage Index',
            'arguments': [__toastring]
        },
    }

    def process(self, products=None, overwrite=False, **kwargs):
        """ Make sure all products have been processed """
        products = super(landsatData, self).process(products, overwrite,
                                                    **kwargs)
        if len(products) == 0:
            return

        start = datetime.now()

        # Add the sensor for this date to the basename
        self.basename = self.basename + '_' + self.sensor_set[0]

        # Read the assets
        try:
            img = self._readraw()
        except Exception, e:
            VerboseOut(traceback.format_exc(), 5)
            raise Exception('Error reading %s: %s' %
                            (basename(self.assets[''].filename), e))

        meta = self.assets[''].meta
        visbands = self.assets[''].visbands
        lwbands = self.assets[''].lwbands
        md = self.meta_dict()

        # running atmosphere if any products require it
        toa = True
        for val in products.requested.values():
            toa = toa and (self._products[val[0]].get('toa', False)
                           or 'toa' in val)
        if not toa:
            start = datetime.now()
            if not settings().REPOS[self.Repository.name]['6S']:
                raise Exception('6S is required for atmospheric correction')
            try:
                wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2'])
                          for b in visbands]
                geo = self.metadata['geometry']
                atm6s = SIXS(visbands,
                             wvlens,
                             geo,
                             self.metadata['datetime'],
                             sensor=self.sensor_set[0])
                md["AOD Source"] = str(atm6s.aod[0])
                md["AOD Value"] = str(atm6s.aod[1])
            except Exception, e:
                VerboseOut(traceback.format_exc(), 4)
                raise Exception('Problem running 6S atmospheric model: %s' % e)
Example #17
                    prodout = Indices(
                        img, dict(zip([p[0] for p in indices.values()],
                                      fnames)), md)
                    prodout = dict(zip(indices.keys(), prodout.values()))
                    [
                        self.AddFile(sensor, key, fname)
                        for key, fname in prodout.items()
                    ]
                VerboseOut(
                    ' -> %s: processed %s in %s' %
                    (self.basename, indices0.keys(), datetime.now() - start),
                    1)
            img = None
            # cleanup directory
            try:
                if settings().REPOS[self.Repository.name.lower()]['extract']:
                    for bname in self.assets['DN'].datafiles():
                        if bname[-7:] != 'MTL.txt':
                            files = glob.glob(
                                os.path.join(self.path, bname) + '*')
                            RemoveFiles(files)
                shutil.rmtree(os.path.join(self.path, 'modtran'))
            except:
                # VerboseOut(traceback.format_exc(), 4)
                pass

    def filter(self, pclouds=100, sensors=None, **kwargs):
        """ Check if tile passes filter """
        if pclouds < 100:
            self.meta()
            if self.metadata['clouds'] > pclouds:
Example #18
def main():
    import gips
    title = 'GIPS Configuration Utility (v%s)' % (version)

    parser = GIPSParser(description=title, datasources=False)
    subparser = parser.add_subparsers(dest='command')
    subparser.add_parser('print', help='Print current settings')
    p = subparser.add_parser(
        'env', help='Configure GIPS repositories in this environment')
    p.add_argument('-r',
                   '--repos',
                   help='Top level directory for repositories',
                   default='/data/repos')
    p.add_argument('-e',
                   '--email',
                   help='Set email address (used for anonymous FTP sources)',
                   default='')
    p = subparser.add_parser(
        'user',
        help=
        'Configure GIPS repositories for this user (for per user customizations)'
    )
    #p.add_argument('-e', '--email', help='Set email address (used for anonymous FTP sources)')
    #h = 'Install full configuration file without inheriting from environment settings'
    #p.add_argument('-f', '--full', help=h, default=False, action='store_true')
    args = parser.parse_args()
    print title

    utils.gips_script_setup(
        driver_string=None,  # NOTE: no driver string for gips_config
        stop_on_error=args.stop_on_error,
        setup_orm=False,  # NOTE: ORM cannot be setup before `gips_config env`
    )  # has been run

    if args.command == 'print':
        with utils.error_handler('Unable to access settings'):
            from gips.utils import settings
            s = settings()
            for v in dir(s):
                if not v.startswith('__') and v != 'gips':
                    print
                    print v
                    exec('pprint.pprint(s.%s)' % v)

    elif args.command == 'env':
        with utils.error_handler('Could not create environment settings'):
            created_cf, cfgfile = create_environment_settings(args.repos,
                                                              email=args.email)

    elif args.command == 'user':
        with utils.error_handler('Could not create user settings'):
            # first try importing environment settings
            import gips.settings
            created_cf, cfgfile = create_user_settings()

    if args.command in ('user', 'env'):
        msg = ('Wrote new config file:  {}.' if created_cf else
               'Found existing config, left unmodified:  {}.')
        print msg.format(cfgfile)
        with utils.error_handler('Could not create repos'):
            print 'Creating repository directories, if needed.'
            try:
                create_repos()
            except:
                if created_cf:
                    print(
                        'Error; removing (likely broken) config file:'
                        '  {}.'.format(cfgfile))
                    os.remove(cfgfile)
                raise
        with utils.error_handler('Could not migrate database'):
            migrate_database()

    utils.gips_exit()
Example #19
    def process(self, products=None, overwrite=False, **kwargs):
        """ Make sure all products have been processed """
        products = super(landsatData, self).process(products, overwrite,
                                                    **kwargs)
        if len(products) == 0:
            return

        start = datetime.now()

        assets = set()
        for key, val in products.requested.items():
            assets.update(self._products[val[0]]['assets'])

        if len(assets) != 1:
            raise Exception(
                'This driver does not support creation of products from different Assets at the same time'
            )

        asset = list(assets)[0]

        # TODO: De-hack this
        # Better approach, but needs some thought, is to loop over assets
        # Ian, you are right. I just don't have enough time to do it.

        if asset == 'SR':

            datafiles = self.assets['SR'].datafiles()

            imgpaths = dict()

            for datafile in datafiles:

                key = datafile.partition('_')[2].split('.')[0]
                path = os.path.join('/vsitar/' + self.assets['SR'].filename,
                                    datafile)

                imgpaths[key] = path

            # print imgpaths

            bname = os.path.join(self.path, self.basename)

            for key, val in products.requested.items():

                if val[0] == "ndvi8sr":

                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)

                    img = gippy.GeoImage(
                        [imgpaths['sr_band4'], imgpaths['sr_band5']])

                    missing = float(img[0].NoDataValue())

                    red = img[0].Read().astype('float32')
                    nir = img[1].Read().astype('float32')

                    wvalid = numpy.where((red != missing) & (nir != missing)
                                         & (red + nir != 0.0))

                    red[wvalid] *= 1.E-4
                    nir[wvalid] *= 1.E-4

                    # TODO: change this so that these pixels become missing
                    red[(red != missing) & (red < 0.0)] = 0.0
                    red[red > 1.0] = 1.0
                    nir[(nir != missing) & (nir < 0.0)] = 0.0
                    nir[nir > 1.0] = 1.0

                    ndvi = missing + numpy.zeros_like(red)
                    ndvi[wvalid] = (nir[wvalid] - red[wvalid]) / (nir[wvalid] +
                                                                  red[wvalid])

                    # set_trace()

                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
                    imgout.SetNoData(-9999.)
                    imgout.SetOffset(0.0)
                    imgout.SetGain(1.0)
                    imgout.SetBandName('NDVI', 1)
                    imgout[0].Write(ndvi)

                if val[0] == "landmask":

                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)

                    img = gippy.GeoImage(
                        [imgpaths['cfmask'], imgpaths['cfmask_conf']])

                    cfmask = img[0].Read()
                    # array([  0,   1,   2,   3,   4, 255], dtype=uint8)
                    # 0 means clear! but I want 1 to mean clear

                    cfmask[cfmask > 0] = 2
                    cfmask[cfmask == 0] = 1
                    cfmask[cfmask == 2] = 0

                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
                    imgout.SetBandName('Land mask', 1)
                    imgout[0].Write(cfmask)

        elif asset == 'DN':

            # This block contains everything that existed in the first generation Landsat driver

            # Add the sensor for this date to the basename
            self.basename = self.basename + '_' + self.sensors[asset]

            # Read the assets
            try:
                img = self._readraw()
            except Exception, e:
                VerboseOut(traceback.format_exc(), 5)
                raise Exception('Error reading %s: %s' %
                                (basename(self.assets['DN'].filename), e))

            meta = self.assets['DN'].meta
            visbands = self.assets['DN'].visbands
            lwbands = self.assets['DN'].lwbands
            md = self.meta_dict()

            # running atmosphere if any products require it
            toa = True
            for val in products.requested.values():
                toa = toa and (self._products[val[0]].get('toa', False)
                               or 'toa' in val)
            if not toa:
                start = datetime.now()

                if not settings().REPOS[self.Repository.name.lower()]['6S']:
                    raise Exception(
                        '6S is required for atmospheric correction')
                try:
                    wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2'])
                              for b in visbands]
                    geo = self.metadata['geometry']
                    atm6s = SIXS(visbands,
                                 wvlens,
                                 geo,
                                 self.metadata['datetime'],
                                 sensor=self.sensor_set[0])
                    md["AOD Source"] = str(atm6s.aod[0])
                    md["AOD Value"] = str(atm6s.aod[1])
                except Exception, e:
                    VerboseOut(traceback.format_exc(), 4)
                    raise Exception(
                        'Problem running 6S atmospheric model: %s' % e)
Example #20
def use_orm():
    """Check GIPS_ORM to see if the user wants to use the Django ORM.

    Defaults to True.
    """
    return getattr(utils.settings(), 'GIPS_ORM', True)
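
Because the flag is read with getattr and a default of True, the ORM stays enabled unless a settings file opts out explicitly. A hypothetical settings entry:

# hypothetical settings.py entry; when absent, use_orm() returns True
GIPS_ORM = False
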
Example #21
File: core.py Project: danlopez00/gips
    def repo(cls):
        """ Get dictionary of repository settings """
        return settings().REPOS[cls.name]
Example #22
File: landsat.py Project: demiurg/gips
                    prodout = Indices(reflimg, dict(zip([p[0] for p in indices_toa.values()], fnames)), md)
                    prodout = dict(zip(indices_toa.keys(), prodout.values()))
                    [self.AddFile(sensor, key, fname) for key, fname in prodout.items()]
                # Run atmospherically corrected
                if len(indices) > 0:
                    fnames = [os.path.join(self.path, self.basename + '_' + key) for key in indices]
                    for col in visbands:
                        img[col] = ((img[col] - atm6s.results[col][1]) / atm6s.results[col][0]) * (1.0 / atm6s.results[col][2])
                    prodout = Indices(img, dict(zip([p[0] for p in indices.values()], fnames)), md)
                    prodout = dict(zip(indices.keys(), prodout.values()))
                    [self.AddFile(sensor, key, fname) for key, fname in prodout.items()]
                VerboseOut(' -> %s: processed %s in %s' % (self.basename, indices0.keys(), datetime.now() - start), 1)
            img = None
            # cleanup directory
            try:
                if settings().REPOS[self.Repository.name.lower()]['extract']:
                    for bname in self.assets['DN'].datafiles():
                        if bname[-7:] != 'MTL.txt':
                            files = glob.glob(os.path.join(self.path, bname) + '*')
                            RemoveFiles(files)
                shutil.rmtree(os.path.join(self.path, 'modtran'))
            except:
                # VerboseOut(traceback.format_exc(), 4)
                pass

    def filter(self, pclouds=100, sensors=None, **kwargs):
        """ Check if tile passes filter """
        if pclouds < 100:
            self.meta()
            if self.metadata['clouds'] > pclouds:
                return False
Example #23
File: landsat.py Project: demiurg/gips
    def process(self, products=None, overwrite=False, **kwargs):
        """ Make sure all products have been processed """
        products = super(landsatData, self).process(products, overwrite, **kwargs)
        if len(products) == 0:
            return

        start = datetime.now()

        assets = set()
        for key, val in products.requested.items():
            assets.update(self._products[val[0]]['assets'])

        if len(assets) != 1:
            raise Exception('This driver does not support creation of products from different Assets at the same time')

        asset = list(assets)[0]

        # TODO: De-hack this
        # Better approach, but needs some thought, is to loop over assets
        # Ian, you are right. I just don't have enough time to do it.

        if asset == 'SR':

            datafiles = self.assets['SR'].datafiles()

            imgpaths = dict()

            for datafile in datafiles:

                key = datafile.partition('_')[2].split('.')[0]
                path = os.path.join('/vsitar/' + self.assets['SR'].filename, datafile)

                imgpaths[key] = path

            # print imgpaths

            bname = os.path.join(self.path, self.basename)

            for key, val in products.requested.items():

                if val[0] == "ndvi8sr":

                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)

                    img = gippy.GeoImage([imgpaths['sr_band4'], imgpaths['sr_band5']])

                    missing = float(img[0].NoDataValue())

                    red = img[0].Read().astype('float32')
                    nir = img[1].Read().astype('float32')

                    wvalid = numpy.where((red != missing) & (nir != missing) & (red + nir != 0.0))

                    red[wvalid] *= 1.E-4
                    nir[wvalid] *= 1.E-4

                    # TODO: change this so that these pixels become missing
                    red[(red != missing) & (red < 0.0)] = 0.0
                    red[red > 1.0] = 1.0
                    nir[(nir != missing) & (nir < 0.0)] = 0.0
                    nir[nir > 1.0] = 1.0

                    ndvi = missing + numpy.zeros_like(red)
                    ndvi[wvalid] = (nir[wvalid] - red[wvalid])/(nir[wvalid] + red[wvalid])

                    # set_trace()

                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Float32, 1)
                    imgout.SetNoData(-9999.)
                    imgout.SetOffset(0.0)
                    imgout.SetGain(1.0)
                    imgout.SetBandName('NDVI', 1)
                    imgout[0].Write(ndvi)

                if val[0] == "landmask":

                    sensor = 'LC8SR'
                    fname = '%s_%s_%s' % (bname, sensor, key)

                    img = gippy.GeoImage([imgpaths['cfmask'], imgpaths['cfmask_conf']])

                    cfmask = img[0].Read()
                    # array([  0,   1,   2,   3,   4, 255], dtype=uint8)
                    # 0 means clear! but I want 1 to mean clear

                    cfmask[cfmask > 0] = 2
                    cfmask[cfmask == 0] = 1
                    cfmask[cfmask == 2] = 0

                    VerboseOut("writing " + fname, 2)
                    imgout = gippy.GeoImage(fname, img, gippy.GDT_Byte, 1)
                    imgout.SetBandName('Land mask', 1)
                    imgout[0].Write(cfmask)


        elif asset == 'DN':

            # This block contains everything that existed in the first generation Landsat driver

            # Add the sensor for this date to the basename
            self.basename = self.basename + '_' + self.sensors[asset]

            # Read the assets
            try:
                img = self._readraw()
            except Exception, e:
                VerboseOut(traceback.format_exc(), 5)
                raise Exception('Error reading %s: %s' % (basename(self.assets['DN'].filename), e))

            meta = self.assets['DN'].meta
            visbands = self.assets['DN'].visbands
            lwbands = self.assets['DN'].lwbands
            md = self.meta_dict()

            # running atmosphere if any products require it
            toa = True
            for val in products.requested.values():
                toa = toa and (self._products[val[0]].get('toa', False) or 'toa' in val)
            if not toa:
                start = datetime.now()

                if not settings().REPOS[self.Repository.name.lower()]['6S']:
                    raise Exception('6S is required for atmospheric correction')
                try:
                    wvlens = [(meta[b]['wvlen1'], meta[b]['wvlen2']) for b in visbands]
                    geo = self.metadata['geometry']
                    atm6s = SIXS(visbands, wvlens, geo, self.metadata['datetime'], sensor=self.sensor_set[0])
                    md["AOD Source"] = str(atm6s.aod[0])
                    md["AOD Value"] = str(atm6s.aod[1])
                except Exception, e:
                    VerboseOut(traceback.format_exc(), 4)
                    raise Exception('Problem running 6S atmospheric model: %s' % e)
Example #24
def process_acolite(asset, aco_proc_dir, products, meta, model_image,
                    extracted_asset_glob=''):
    """Generate acolite products from the given asset.

    Args:
        asset:  Asset instance
        aco_proc_dir:  Location to put intermediate files; tempdir is
            suggested, and the caller is responsible for disposing of it
        products:  dict specifying product type strings & paths to
            destination product files
        meta:  dict defining metadata to add to the product files
        model_image:  A GeoImage suitable for basing the output products on.
        extracted_asset_glob:  If needed, pass in a glob to help acolite find
            extracted asset data.

    Returns:  mapping of generated product type strings to filenames;
        Data.AddFile() ready.
    """
    # FILTER PRODUCTS FOR THOSE SUPPORTED BY CURRENT ASSET
    valid_products = {}
    for k, v in products.items():
        valid_sensors = _aco_prod_templs[k].get('sensors', None)
        if valid_sensors and asset.sensor not in valid_sensors:
            verbose_out('{} not valid for {}, skipping.'.format(
                k, asset.sensor), 2)
        else:
            valid_products[k] = v

    prod_args = [_aco_prod_templs[k]['acolite-product']
                        for k in products if k != 'acoflags']
    asset_dn = os.path.join(aco_proc_dir, 'asset')
    os.mkdir(asset_dn)
    output_dn = os.path.join(aco_proc_dir, 'output')
    os.mkdir(output_dn)

    # EXTRACT ASSET
    verbose_out('acolite processing:  Extracting {} to {}'.format(
                asset.filename, asset_dn), 2)
    # TODO there may be a way to avoid extracting sometimes; would save time:
    # some assets will come pre-extracted; use those dirs accordingly
    asset.extract(path=asset_dn)
    verbose_out('acolite processing:  Finished extracting {} to {}'.format(
                asset.filename, asset_dn), 2)

    # PROCESS SETTINGS TEMPLATE FOR SPECIFIED PRODUCTS
    settings_path = os.path.join(aco_proc_dir, 'settings.cfg')
    # acoflags is always internally generated by ACOLITE
    prod_args = [_aco_prod_templs[k]['acolite-product']
                        for k in valid_products if k != 'acoflags']
    if len(prod_args) == 0:
        raise Exception("ACOLITE: Must specify at least 1 product."
                        "  'acoflags' cannot be generated on its own.")
    with open(settings_path, 'w') as settings_fo:
        print('l2w_mask=True\nl2w_mask_wave=1609\nl2w_mask_threshold=0.05',
              file=settings_fo)
        # xy_output=True may want this; writes easting & northing to netcdfs
        for s in ('l2w_parameters=' + ','.join(prod_args),
                  'output=' + output_dn):
            print(s, file=settings_fo)

    with open(settings_path, 'r') as settings_fo:
        verbose_out('acolite processing:  ====== begin acolite.cfg ======', 4)
        verbose_out(settings_fo.read(), 4)
        verbose_out('acolite processing:  ====== end acolite.cfg ======', 4)

    eag_fp = os.path.join(asset_dn, extracted_asset_glob)
    eag_rv = glob.glob(eag_fp)
    if len(eag_rv) != 1:
        raise IOError("Expected exactly one asset glob for"
                      " {}, found {}".format(eag_fp, eag_rv))
    extracted_asset_fp = eag_rv[0]

    # PROCESS VIA ACOLITE CALL
    # TODO options we now lack, not sure if want:
    # -IDL_CPU_TPOOL_NTHREADS 1 run=1
    cmd = '{} --cli --nogfx --images={} --settings={}'.format(
            os.path.join(utils.settings().ACOLITE['dir'], 'acolite'),
            extracted_asset_fp, settings_path)
    verbose_out('acolite processing:  starting acolite: `{}`'.format(cmd), 2)

    status, output = commands.getstatusoutput(cmd)

    verbose_out('acolite processing:  ====== begin acolite output ======', 4)
    verbose_out(output, 4)
    verbose_out('acolite processing:  ====== end acolite output ======', 4)
    if status != 0:
        raise RuntimeError("Got exit status {} from `{}`".format(status, cmd))

    # EXTRACT IMAGES FROM NETCDF AND COMBINE MULTI-IMAGE PRODUCTS INTO
    # A MULTI-BAND TIF, ADD METADATA, and MOVE INTO TILES
    verbose_out('acolite processing:  acolite completed;'
                ' starting conversion from netcdf into gips products', 2)
    aco_nc_file = next(glob.iglob(
            os.path.join(output_dn, '*' + asset.tile + '*_L2W.nc')))

    prodout = acolite_nc_to_prods(
            valid_products, aco_nc_file, meta, model_image)
    verbose_out('acolite processing:  finishing;'
                ' {} products completed'.format(len(valid_products)), 2)
    return prodout
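
A hypothetical call shaped by the docstring above; asset, model_image, data_obj, the product type string, and every path are placeholders rather than names taken from GIPS itself.

import shutil
import tempfile

aco_proc_dir = tempfile.mkdtemp(prefix='aco-proc-')  # caller owns this tempdir
try:
    prodout = process_acolite(
        asset,                                     # an Asset instance (placeholder)
        aco_proc_dir,
        products={'rhow': '/tmp/scene_rhow.tif'},  # product type -> destination path
        meta={'GIPS Version': '0.9.0'},
        model_image=model_image,                   # a GeoImage to base outputs on
        extracted_asset_glob='LC08*')              # helps locate extracted asset data
    for p_type, fname in prodout.items():
        data_obj.AddFile(asset.sensor, p_type, fname)  # per the Returns note
finally:
    shutil.rmtree(aco_proc_dir)  # docstring: caller disposes of the tempdir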