Code example #1
    def __init__(self,
                 bandnums,
                 wavelengths,
                 geometry,
                 date_time,
                 sensor=None):
        """ Run SixS atmospheric model using Py6S """
        start = datetime.datetime.now()
        VerboseOut('Running atmospheric model (6S)', 2)

        s = SixS()
        # Geometry
        s.geometry = Geometry.User()
        s.geometry.from_time_and_location(geometry['lat'], geometry['lon'],
                                          str(date_time), geometry['zenith'],
                                          geometry['azimuth'])
        s.altitudes = Altitudes()
        s.altitudes.set_target_sea_level()
        s.altitudes.set_sensor_satellite_level()

        doy = (date_time - datetime.datetime(date_time.year, 1, 1)).days + 1
        # Atmospheric profile
        s.atmos_profile = atmospheric_model(doy, geometry['lat'])

        # Aerosols
        # TODO - dynamically adjust AeroProfile?
        s.aero_profile = AeroProfile.PredefinedType(AeroProfile.Continental)

        self.aod = aodData.get_aod(geometry['lat'], geometry['lon'],
                                   date_time.date())
        s.aot550 = self.aod[1]

        # Other settings
        s.ground_reflectance = GroundReflectance.HomogeneousLambertian(
            GroundReflectance.GreenVegetation)
        s.atmos_corr = AtmosCorr.AtmosCorrLambertianFromRadiance(1.0)

        # Used for testing
        try:
            stdout = sys.stdout
            funcs = {
                'LT5': SixSHelpers.Wavelengths.run_landsat_tm,
                'LT7': SixSHelpers.Wavelengths.run_landsat_etm,
                # LC8 doesn't seem to work
                #'LC8': SixSHelpers.Wavelengths.run_landsat_oli
            }
            if sensor in funcs.keys():
                sys.stdout = open(os.devnull, 'w')
                wvlens, outputs = funcs[sensor](s)
                sys.stdout = stdout
            else:
                # Use wavelengths
                outputs = []
                for wv in wavelengths:
                    s.wavelength = Wavelength(wv[0], wv[1])
                    s.run()
                    outputs.append(s.outputs)
        except Exception, e:
            sys.stdout = stdout
            raise AtmCorrException("Error running 6S: %s" % e)
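A quick standalone check of the day-of-year arithmetic used above to select the atmospheric profile (the sample date is arbitrary):

import datetime

# Days since Jan 1 of the same year, plus one, equals the calendar day of year.
dt = datetime.datetime(2014, 7, 1, 15, 30)
doy = (dt - datetime.datetime(dt.year, 1, 1)).days + 1
assert doy == dt.timetuple().tm_yday
print(doy)  # 182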
Code example #2
File: tiles.py Project: dchowdhury/gips
 def mosaic(self,
            datadir,
            res=None,
            interpolation=0,
            crop=False,
            overwrite=False):
     """ Combine tiles into a single mosaic, warp if res provided """
     if self.spatial.site is None:
         raise Exception('Site required for creating mosaics')
     start = datetime.now()
     mkdir(datadir)
     bname = self.date.strftime('%Y%j')
     for product in self.products.products:
         sensor = self.which_sensor(product)
         if sensor is None:
             continue
         # TODO - this is assuming a tif file.  Use gippy FileExtension function when it is exposed
         fout = os.path.join(datadir, '%s_%s_%s' %
                             (bname, sensor, product)) + '.tif'
         if not os.path.exists(fout) or overwrite:
             try:
                 filenames = [
                     self.tiles[t].filenames[(sensor, product)]
                     for t in self.tiles
                 ]
                 images = gippy.GeoImages(filenames)
                 if self.spatial.site is not None and res is not None:
                     CookieCutter(images, self.spatial.site, fout, res[0],
                                  res[1], crop, interpolation)
                 else:
                     mosaic(images, fout, self.spatial.site)
             except Exception, e:
                 VerboseOut(traceback.format_exc(), 4)
                 VerboseOut("Error mosaicking %s: %s" % (fout, e))
Code example #3
File: core.py Project: aniucd/gips
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'), date.strftime('%j'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)

            for f in ftp.nlst('*'):
                VerboseOut("Downloading %s" % f, 2)
                # use a context manager so each staged file is closed promptly
                with open(os.path.join(cls.Repository.path('stage'), f), "wb") as ofile:
                    ftp.retrbinary('RETR %s' % f, ofile.write)
            ftp.close()
        except Exception, e:
            VerboseOut(traceback.format_exc(), 4)
            raise Exception("Error downloading: %s" % e)
Code example #4
File: core.py Project: aniucd/gips
 def ParseAndAddFiles(self, filenames=None):
     """ Parse and Add filenames to existing filenames """
     if filenames is None:
         filenames = self.find_files()
     datedir = self.Repository._datedir
     for f in filenames:
         bname = basename(f)
         parts = bname.split('_')
         if len(parts) < 3 or len(parts) > 4:
             # Skip this file
             VerboseOut('Unrecognizable file: %s' % f, 3)
             continue
         offset = 1 if len(parts) == 4 else 0
         try:
             if self.date is None:
                 # First time through
                 self.date = datetime.strptime(parts[0 + offset], datedir).date()
             else:
                 date = datetime.strptime(parts[0 + offset], datedir).date()
                 if date != self.date:
                     raise Exception('Mismatched dates: %s' % ' '.join(filenames))
             sensor = parts[1 + offset]
             product = parts[2 + offset]
             self.AddFile(sensor, product, f)
         except Exception:
             # This was just a bad file
             VerboseOut('Unrecognizable file: %s' % f, 3)
             continue
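The split-and-parse step can be exercised on its own; a minimal sketch, assuming a '%Y%j' date format and a basename() that strips the file extension (neither is shown in the snippet):

import os
from datetime import datetime

f = '/data/012030_2015123_LC8_ndvi.tif'  # hypothetical tile_date_sensor_product name
parts = os.path.splitext(os.path.basename(f))[0].split('_')
offset = 1 if len(parts) == 4 else 0    # the leading tile id is optional
date = datetime.strptime(parts[0 + offset], '%Y%j').date()
sensor, product = parts[1 + offset], parts[2 + offset]
print((date, sensor, product))  # (datetime.date(2015, 5, 3), 'LC8', 'ndvi')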
Code example #5
File: core.py Project: aniucd/gips
    def archive(cls, path='.', recursive=False, keep=False, update=False, **kwargs):
        """ Move assets from directory to archive location """
        start = datetime.now()

        fnames = []
        if recursive:
            for root, subdirs, files in os.walk(path):
                for a in cls._assets.values():
                    fnames.extend(glob.glob(os.path.join(root, a['pattern'])))
        else:
            for a in cls._assets.values():
                fnames.extend(glob.glob(os.path.join(path, a['pattern'])))
        numlinks = 0
        numfiles = 0
        assets = []
        for f in fnames:
            archived = cls._archivefile(f, update)
            if archived[1] >= 0:
                if not keep:
                    RemoveFiles([f], ['.index', '.aux.xml'])
            if archived[1] > 0:
                numfiles = numfiles + 1
                numlinks = numlinks + archived[1]
                assets.append(archived[0])

        # Summarize
        if numfiles > 0:
            VerboseOut('%s files (%s links) from %s added to archive in %s' %
                      (numfiles, numlinks, os.path.abspath(path), datetime.now() - start))
        if numfiles != len(fnames):
            VerboseOut('%s files not added to archive' % (len(fnames) - numfiles))
        return assets
Code example #6
File: project.py Project: ircwaves/gips
def main():
    title = Colors.BOLD + 'GIPS Data Project (v%s)' % __version__ + Colors.OFF

    # argument parsing
    parser0 = GIPSParser(description=title)
    parser0.add_inventory_parser(site_required=True)
    parser0.add_process_parser()
    parser0.add_project_parser()
    parser0.add_warp_parser()
    args = parser0.parse_args()

    try:
        print title
        cls = import_data_class(args.command)

        extents = SpatialExtent.factory(cls, args.site, args.key, args.where,
                                        args.tiles, args.pcov, args.ptile)

        # create tld: SITENAME--KEY_DATATYPE_SUFFIX
        if args.notld:
            tld = args.outdir
        else:
            key = '' if args.key == '' else '--' + args.key
            suffix = '' if args.suffix == '' else '_' + args.suffix
            res = '' if args.res is None else '_%sx%s' % (args.res[0],
                                                          args.res[1])
            bname = (extents[0].site.LayerName() + key + res + '_' +
                     args.command + suffix)
            tld = os.path.join(args.outdir, bname)

        for extent in extents:
            t_extent = TemporalExtent(args.dates, args.days)
            inv = DataInventory(cls, extent, t_extent, **vars(args))
            datadir = os.path.join(tld, extent.site.Value())
            if inv.numfiles > 0:
                inv.mosaic(
                    datadir=datadir,
                    tree=args.tree,
                    overwrite=args.overwrite,
                    res=args.res,
                    interpolation=args.interpolation,
                    crop=args.crop,
                    alltouch=args.alltouch,
                )
                inv = ProjectInventory(datadir)
                inv.pprint()
            else:
                VerboseOut(
                    'No data found for {} within temporal extent {}'.format(
                        str(extent), str(t_extent)),
                    2,
                )
    except Exception as e:
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Data Project error: %s' % e
Code example #7
File: stats.py Project: ircwaves/gips
def main():
    title = Colors.BOLD + 'GIPS Image Statistics (v%s)' % __version__ + Colors.OFF

    parser0 = GIPSParser(datasources=False, description=title)
    parser0.add_default_parser()
    parser0.add_projdir_parser()
    group = parser0.add_argument_group('masking options')
    args = parser0.parse_args()

    # TODO - check that at least 1 of filemask or pmask is supplied

    try:
        print title
        header = ['min', 'max', 'mean', 'sd', 'skew', 'count']

        for projdir in args.projdir:
            VerboseOut('Stats for Project directory: %s' % projdir, 1)
            inv = ProjectInventory(projdir, args.products)

            files = {}
            for date in inv.dates:
                VerboseOut('Calculating statistics for %s' % date)
                for p in inv.products(date):
                    img = inv[date].open(p)
                    if p not in files.keys():
                        files[p] = open(
                            os.path.join(projdir, p + '_stats.txt'), 'w')
                        # write header
                        files[p].write('date ')
                        if img.NumBands() == 1:
                            files[p].write(' '.join(header))
                        else:
                            for band in img:
                                h = [
                                    band.Description() + "-" + a
                                    for a in header
                                ]
                                files[p].write(' '.join(h) + ' ')
                        files[p].write('\n')
                    # print date and stats
                    files[p].write(date.strftime('%Y-%j'))
                    for band in img:
                        stats = band.Stats()
                        for s in stats:
                            files[p].write(' ' + str(s))
                        files[p].write(' ')
                    files[p].write('\n')
                    img = None
            for f in files:
                files[f].close()

    except Exception, e:
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Error: %s' % e
Code example #8
 def run(self, repos=None, **kwargs):
     repos = repos if repos is not None else gips.settings.REPOS
     for repo in repos:
         try:
             exec('from gips.data.%s import test' % repo.lower())
             print
             exec('test()')
         except Exception, e:
             VerboseOut('\n%s error: %s' % (repo, e))
             VerboseOut(traceback.format_exc(), 3)
             #print '\n%s: no test shapefile' % repo
             pass
Code example #9
File: core.py Project: aniucd/gips
 def _archivefile(cls, filename, update=False):
     """ archive specific file """
     bname = os.path.basename(filename)
     try:
         asset = cls(filename)
     except Exception, e:
         # if problem with inspection, move to quarantine
         VerboseOut(traceback.format_exc(), 3)
         qname = os.path.join(cls.Repository.path('quarantine'), bname)
         if not os.path.exists(qname):
             os.link(os.path.abspath(filename), qname)
         VerboseOut('%s -> quarantine (file error): %s' % (filename, e), 2)
         return (None, 0)
Code example #10
 def process(self, *args, **kwargs):
     """ Process assets into requested products """
     # TODO - some check on if any processing was done
     start = dt.now()
     VerboseOut('Processing [%s] on %s dates (%s files)' % (self.products, len(self.dates), self.numfiles), 3)
     if len(self.products.standard) > 0:
         for date in self.dates:
             try:
                 self.data[date].process(*args, **kwargs)
             except:
                 VerboseOut(traceback.format_exc(), 4)
     if len(self.products.composite) > 0:
         self.dataclass.process_composites(self, self.products.composite, **kwargs)
     VerboseOut('Processing completed in %s' % (dt.now() - start), 2)
Code example #11
    def __init__(self, filename):
        """ Inspect a single file and get some metadata """
        super(landsatAsset, self).__init__(filename)

        fname = os.path.basename(filename)

        VerboseOut(("fname", fname), 2)

        self.tile = fname[3:9]
        year = fname[9:13]
        doy = fname[13:16]
        self.date = datetime.strptime(year + doy, "%Y%j")

        if fnmatchcase(fname, self._assets['SR']['pattern']):
            VerboseOut('SR asset', 2)
            self.asset = 'SR'
            self.sensor = 'LC8SR'
            self.version = int(fname[20:22])
        elif fnmatchcase(fname, self._assets['DN']['pattern']):
            VerboseOut('DN asset', 2)
            self.asset = 'DN'
            self.sensor = fname[0:3]
            self.version = int(fname[19:21])
            # Landsat DN specific additions
            smeta = self._sensors[self.sensor]
            self.meta = {}
            for i, band in enumerate(smeta['colors']):
                wvlen = smeta['bandlocs'][i]
                self.meta[band] = {
                    'bandnum': i + 1,
                    'wvlen': wvlen,
                    'wvlen1': wvlen - smeta['bandwidths'][i] / 2.0,
                    'wvlen2': wvlen + smeta['bandwidths'][i] / 2.0,
                    'E': smeta['E'][i],
                    'K1': smeta['K1'][i],
                    'K2': smeta['K2'][i],
                }
            self.visbands = [
                col for col in smeta['colors'] if col[0:4] != "LWIR"
            ]
            self.lwbands = [
                col for col in smeta['colors'] if col[0:4] == "LWIR"
            ]
        else:
            msg = "No matching landsat asset type for '{}'".format(fname)
            raise RuntimeError(msg, filename)

        if self.sensor not in self._sensors.keys():
            raise Exception("Sensor %s not supported: %s" %
                            (self.sensor, filename))
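The fixed-position slicing above can be traced with a hypothetical Landsat DN asset name (sensor LC8, path 012, row 030, year 2015, day 123); the name layout here is inferred from the slices, not taken from the asset patterns:

from datetime import datetime

fname = 'LC80120302015123LGN00_DN.tar.gz'  # hypothetical filename
tile = fname[3:9]                  # '012030': WRS-2 path + row
date = datetime.strptime(fname[9:13] + fname[13:16], '%Y%j')
sensor = fname[0:3]                # 'LC8'
version = int(fname[19:21])        # DN branch: two digits after 'LGN'
print((tile, date.date(), sensor, version))  # ('012030', datetime.date(2015, 5, 3), 'LC8', 0)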
Code example #12
 def process(self, *args, **kwargs):
     """ Process assets into requested products """
     # TODO - some check on if any processing was done
     start = dt.now()
     VerboseOut(
         'Processing [%s] on %s dates (%s files)' %
         (self.products, len(self.dates), self.numfiles), 3)
     if len(self.products.standard) > 0:
         for date in self.dates:
             with utils.error_handler(continuable=True):
                 self.data[date].process(*args, **kwargs)
     if len(self.products.composite) > 0:
         self.dataclass.process_composites(self, self.products.composite,
                                           **kwargs)
     VerboseOut('Processing completed in %s' % (dt.now() - start), 2)
Code example #13
File: modis.py Project: aniucd/gips
    def fetch(cls, asset, tile, date):
        #super(modisAsset, cls).fetch(asset, tile, date)

        year, month, day = date.timetuple()[:3]
        mainurl = '%s/%s.%02d.%02d' % (cls._assets[asset]['url'], str(year),
                                       month, day)
        try:
            listing = urllib.urlopen(mainurl).readlines()
        except Exception:
            # MODIS servers do maintenance on wednesday
            raise Exception("Unable to access %s --- is it Wednesday?" %
                            mainurl)

        pattern = '(%s.A%s%s.%s.005.\d{13}.hdf)' % (
            asset, str(year), str(date.timetuple()[7]).zfill(3), tile)
        cpattern = re.compile(pattern)
        success = False

        for item in listing:
            if cpattern.search(item):
                if 'xml' in item:
                    continue
                name = cpattern.findall(item)[0]
                url = ''.join([mainurl, '/', name])
                outpath = os.path.join(cls.Repository.path('stage'), name)

                try:
                    #urllib.urlretrieve(url, outpath)
                    connection = urllib2.urlopen(url)
                    output = open(outpath, 'wb')
                    output.write(connection.read())
                    output.close()

                except Exception:
                    # TODO - implement pre-check to only attempt on valid dates
                    # then uncomment this
                    #raise Exception('Unable to retrieve %s from %s' % (name, url))
                    pass
                else:
                    VerboseOut('Retrieved %s' % name, 2)
                    success = True

        if not success:
            # TODO - implement pre-check to only attempt on valid dates then uncomment below
            #raise Exception('Unable to find remote match for %s at %s' % (pattern, mainurl))
            VerboseOut(
                'Unable to find remote match for %s at %s' %
                (pattern, mainurl), 4)
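The listing pattern built above can be tested offline; the HDF name below is made up but follows the MODIS collection-005 naming convention:

import re

asset, year, doy, tile = 'MCD43A4', 2012, '265', 'h12v04'
pattern = r'(%s.A%s%s.%s.005.\d{13}.hdf)' % (asset, year, doy, tile)
line = '<a href="MCD43A4.A2012265.h12v04.005.2012283130456.hdf">link</a>'
print(re.compile(pattern).findall(line))
# ['MCD43A4.A2012265.h12v04.005.2012283130456.hdf']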
Code example #14
File: core.py Project: aniucd/gips
 def fetch(cls, products, tiles, textent):
     """ Download data for tiles and add to archive """
     assets = cls.products2assets(products)
     fetched = []
     for a in assets:
         for t in tiles:
             asset_dates = cls.Asset.dates(a, t, textent.datebounds, textent.daybounds)
             for d in asset_dates:
                 # if we don't have it already
                 if not cls.Asset.discover(t, d, a):
                     try:
                         cls.Asset.fetch(a, t, d)
                         fetched.append((a, t, d))
                     except Exception, e:
                         VerboseOut(traceback.format_exc(), 4)
                         VerboseOut('Problem fetching asset: %s' % e, 3)
Code example #15
 def _read_point(cls, filename, roi, nodata):
     """ Read single point from mean/var file and return if valid, or mean/var of 3x3 neighborhood """
     if not os.path.exists(filename):
         return (numpy.nan, numpy.nan)
     try:
         img = gippy.GeoImage(filename)
         vals = img[0].Read(roi).squeeze()
         variances = img[1].Read(roi)
         vals[numpy.where(vals == nodata)] = numpy.nan
         variances[numpy.where(variances == nodata)] = numpy.nan
         val = numpy.nan
         var = numpy.nan
         if ~numpy.isnan(vals[1, 1]):
             val = vals[1, 1]
         elif numpy.any(~numpy.isnan(vals)):
             val = numpy.mean(vals[~numpy.isnan(vals)])
         if ~numpy.isnan(variances[1, 1]):
             var = variances[1, 1]
         elif numpy.any(~numpy.isnan(variances)):
             var = numpy.mean(variances[~numpy.isnan(variances)])
         img = None
         return (val, var)
     except:
         VerboseOut(traceback.format_exc(), 4)
         return (numpy.nan, numpy.nan)
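The center-pixel-or-neighborhood fallback is easier to see on a toy 3x3 window; a sketch of the same logic:

import numpy

vals = numpy.array([[1., 2., 3.],
                    [4., numpy.nan, 6.],
                    [7., 8., 9.]])
if ~numpy.isnan(vals[1, 1]):
    val = vals[1, 1]                             # center pixel is valid
elif numpy.any(~numpy.isnan(vals)):
    val = numpy.mean(vals[~numpy.isnan(vals)])   # mean of the valid neighbors
else:
    val = numpy.nan
print(val)  # 5.0: the center is nodata, so the eight valid neighbors are averaged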
Code example #16
 def process_mean(cls, filenames, fout):
     """ Calculates mean of all filenames, and per pixel variances """
     start = datetime.datetime.now()
     imgout = None  # returned as None if no filenames were given
     if len(filenames) > 0:
         img = gippy.GeoImage(filenames)
         imgout = gippy.GeoImage(fout, img, gippy.GDT_Float32, 2)
         imgout.SetNoData(-32768)
         img.Mean(imgout[0])
         meanimg = imgout[0].Read()
         for band in range(0, img.NumBands()):
             data = img[band].Read()
             mask = img[band].DataMask()
             var = numpy.multiply(numpy.power(data - meanimg, 2), mask)
             if band == 0:
                 totalvar = var
                 counts = mask
             else:
                 totalvar = totalvar + var
                 counts = counts + mask
         inds = numpy.where(counts == 0)
         totalvar[inds] = -32768
         inds = numpy.where(counts != 0)
         totalvar[inds] = numpy.divide(totalvar[inds], counts[inds])
         imgout[1].Write(totalvar)
         t = datetime.datetime.now() - start
         VerboseOut('%s: mean/var for %s files processed in %s' %
                    (os.path.basename(fout), len(filenames), t))
     return imgout
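The band loop accumulates masked squared deviations and divides by the per-pixel count of valid observations; a small numpy sketch of the same accumulation, using the -32768 nodata value from above:

import numpy

stack = numpy.array([[[1., 3.], [5., -32768.]],
                     [[3., 3.], [7., -32768.]]])  # two bands of a 2x2 image
mask = (stack != -32768).astype('float32')
counts = mask.sum(axis=0)
mean = (stack * mask).sum(axis=0) / numpy.maximum(counts, 1)
totalvar = (numpy.power(stack - mean, 2) * mask).sum(axis=0)
totalvar[counts == 0] = -32768
totalvar[counts != 0] /= counts[counts != 0]
print(totalvar)  # [[1. 0.] [1. -32768.]]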
Code example #17
    def mosaic(self, datadir='./', tree=False, **kwargs):
        """ Create project files for data in inventory """
        # make sure products have been processed first
        self.process(overwrite=False)
        start = dt.now()
        VerboseOut('Creating mosaic project %s' % datadir, 2)
        VerboseOut('  Dates: %s' % self.datestr)
        VerboseOut('  Products: %s' % self.products)

        dout = datadir
        for d in self.dates:
            if tree:
                dout = os.path.join(datadir, d.strftime('%Y%j'))
            self.data[d].mosaic(dout, **kwargs)

        VerboseOut('Completed mosaic project in %s' % (dt.now() - start), 2)
Code example #18
    def __init__(self,
                 dataclass,
                 spatial,
                 temporal,
                 products=None,
                 fetch=False,
                 **kwargs):
        """ Create a new inventory
        :dataclass: The Data class to use (e.g., LandsatData, ModisData)
        :spatial: The spatial extent requested
        :temporal: The temporal extent requested
        :products: List of requested products of interest
        :fetch: bool indicated if missing data should be downloaded
        """
        VerboseOut('Retrieving inventory for site %s' % spatial.sitename, 2)

        self.dataclass = dataclass
        Repository = dataclass.Asset.Repository
        self.spatial = spatial
        self.temporal = temporal
        self.products = dataclass.RequestedProducts(products)

        if fetch:
            try:
                dataclass.fetch(self.products.base, self.spatial.tiles,
                                self.temporal)
            except Exception, e:
                raise Exception('Error downloading %s: %s' %
                                (dataclass.name, e))
            dataclass.Asset.archive(Repository.path('stage'))
Code example #19
File: process.py Project: dchowdhury/gips
def main():
    title = Colors.BOLD + 'GIPS Data Processing (v%s)' % __version__ + Colors.OFF

    # argument parsing
    parser0 = GIPSParser(description=title)
    parser0.add_inventory_parser()
    parser0.add_process_parser()
    args = parser0.parse_args()

    try:
        print title
        cls = import_data_class(args.command)

        extents = SpatialExtent.factory(cls, args.site, args.key, args.where,
                                        args.tiles, args.pcov, args.ptile)
        for extent in extents:
            inv = DataInventory(cls, extent,
                                TemporalExtent(args.dates, args.days),
                                **vars(args))
            inv.process(overwrite=args.overwrite)

    except Exception, e:
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Data processing error: %s' % e
Code example #20
File: archive.py Project: ircwaves/gips
def main():
    title = Colors.BOLD + 'GIPS Data Archive Utility (v%s)' % gipsversion + Colors.OFF

    # argument parsing
    parser0 = GIPSParser(description=title)
    parser = parser0.add_default_parser()
    group = parser.add_argument_group('archive options')
    group.add_argument('--keep',
                       help='Keep files after adding to archive',
                       default=False,
                       action='store_true')
    group.add_argument('--recursive',
                       help='Iterate through subdirectories',
                       default=False,
                       action='store_true')
    group.add_argument(
        '--update',
        help='Update asset if newer version available '
             '(must call gips_process to regenerate products)',
        default=False,
        action='store_true')
    args = parser0.parse_args()

    try:
        print title
        cls = import_data_class(args.command)
        cls.Asset.archive(**vars(args))
    except Exception, e:
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Data archive error: %s' % e
Code example #21
    def __init__(self, projdir='', products=[]):
        """ Create inventory of a GIPS project directory """
        self.projdir = os.path.abspath(projdir)
        if not os.path.exists(self.projdir):
            raise Exception('Directory %s does not exist!' % self.projdir)

        self.data = {}
        product_set = set()
        sensor_set = set()
        try:
            for dat in Data.discover(self.projdir):
                self.data[dat.date] = dat
                # All products and sensors used across all dates
                product_set = product_set.union(dat.product_set)
                sensor_set = sensor_set.union(dat.sensor_set)

            if not products:
                products = list(product_set)
            self.requested_products = products
            self.sensors = sensor_set
        except:
            VerboseOut(traceback.format_exc(), 4)
            raise Exception(
                "%s does not appear to be a GIPS project directory" %
                self.projdir)
Code example #22
 def pprint(self, md=False, size=False):
     """ Print the inventory """
     if len(self.data) == 0:
         print 'No matching files in inventory'
         return
     self.data[self.data.keys()[0]].pprint_asset_header()
     dformat = '%m-%d' if md else '%j'
     oldyear = 0
     formatstr = '{:<12}\n'
     colors = {k: self.color(k) for k in self.sensor_set}
     for date in self.dates:
         # if new year then write out the year
         if date.year != oldyear:
             sys.stdout.write(Colors.BOLD + formatstr.format(date.year) +
                              Colors.OFF)
         self.data[date].pprint(dformat, colors)
         oldyear = date.year
     if self.numfiles != 0:
         VerboseOut(
             "\n\n%s files on %s dates" % (self.numfiles, len(self.dates)),
             1)
     if size:
         filelist_gen = (tile.filenames.values() +
                         [a.filename for a in tile.assets.values()]
                         for tiles in self.data.values()
                         for tile in tiles.tiles.values())
         total_size = sum(
             sum(os.stat(f).st_size for f in fl) for fl in filelist_gen)
         sitename = self.spatial.sitename
         if sitename == 'tiles':
             sitename += str(self.spatial.tiles)
         print('{} includes {:.0f} Mebibytes of local gips archive data'.
               format(sitename, total_size / 2**20))
Code example #23
File: algorithm.py Project: indigo-ag/gips
    def main(cls):
        """ Main for algorithm classes """
        dhf = argparse.ArgumentDefaultsHelpFormatter

        # Top level parser
        parser = argparse.ArgumentParser(formatter_class=dhf,
                                         description=cls.info())
        parser.add_argument('-v',
                            '--verbose',
                            help='Verbosity - 0: quiet, 1: normal, 2+: debug',
                            default=1,
                            type=int)
        parser = cls.parser(parser)

        args = parser.parse_args()
        gippy.Options.SetVerbose(args.verbose)
        VerboseOut(cls.info())

        utils.gips_script_setup(driver_string=None, setup_orm=False)

        with utils.error_handler('Error in {}'.format(cls.name)):
            alg = cls(**vars(args))
            alg.run_command(**vars(args))

        utils.gips_exit()
Code example #24
 def copy(self,
          dout,
          products,
          site=None,
          res=None,
          interpolation=0,
          crop=False,
          overwrite=False,
          tree=False):
     """ Copy products to new directory, warp to projection if given site """
     # TODO - allow hard and soft linking options
     if res is None:
         res = self.Asset._defaultresolution
         #VerboseOut('Using default resolution of %s x %s' % (res[0], res[1]))
     dout = os.path.join(dout, self.id)
     if tree:
         dout = os.path.join(dout, self.date.strftime('%Y%j'))
     mkdir(dout)
     products = self.RequestedProducts(products)
     bname = '%s_%s' % (self.id, self.date.strftime('%Y%j'))
     for p in products.requested:
         if p not in self.sensors:
             # this product is not available for this day
             continue
         sensor = self.sensors[p]
         fin = self.filenames[(sensor, p)]
         fout = os.path.join(dout, "%s_%s_%s.tif" % (bname, sensor, p))
         if not os.path.exists(fout) or overwrite:
             try:
                 if site is not None:
                     # warp just this tile
                     resampler = ['near', 'bilinear', 'cubic']
                     cmd = 'gdalwarp %s %s -t_srs "%s" -tr %s %s -r %s' % \
                            (fin, fout, site.Projection(), res[0], res[1], resampler[interpolation])
                     print cmd
                     #result = commands.getstatusoutput(cmd)
                 else:
                     gippy.GeoImage(fin).Process(fout)
                     #shutil.copyfile(fin, fout)
             except Exception:
                 VerboseOut(traceback.format_exc(), 4)
                 VerboseOut("Problem creating %s" % fout)
     procstr = 'copied' if site is None else 'warped'
     VerboseOut('%s tile %s: %s files %s' %
                (self.date, self.id, len(products.requested), procstr))
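The warp branch shells out to gdalwarp; the assembled command can be previewed without GDAL installed (all values hypothetical):

resampler = ['near', 'bilinear', 'cubic']
fin, fout = 'tile_in.tif', 'tile_out.tif'
projection, res, interpolation = 'EPSG:32619', (30.0, 30.0), 1
cmd = 'gdalwarp %s %s -t_srs "%s" -tr %s %s -r %s' % (
    fin, fout, projection, res[0], res[1], resampler[interpolation])
print(cmd)
# gdalwarp tile_in.tif tile_out.tif -t_srs "EPSG:32619" -tr 30.0 30.0 -r bilinear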
Code example #25
File: core.py Project: aniucd/gips
 def asset_filenames(self, product):
     assets = self._products[product]['assets']
     filenames = []
     for asset in assets:
         filenames.extend(self.assets[asset].datafiles())
     if len(filenames) == 0:
         VerboseOut('There are no available assets on %s for tile %s' % (str(self.date), str(self.id), ), 3)
         return None
     return filenames
Code example #26
File: process.py Project: ircwaves/gips
def main():
    title = Colors.BOLD + 'GIPS Data Processing (v%s)' % __version__ + Colors.OFF

    # argument parsing
    parser0 = GIPSParser(description=title)
    parser0.add_inventory_parser()
    parser0.add_process_parser()
    args = parser0.parse_args()

    try:
        print title
        cls = import_data_class(args.command)

        extents = SpatialExtent.factory(
            cls, args.site, args.key, args.where, args.tiles, args.pcov,
            args.ptile
        )
        batchargs = None
        if args.batchout:
            tdl = []
            batchargs = '--chunksize ' + str(args.chunksize)
            batchargs += ' --format ' + str(args.format)
            if args.overwrite:
                batchargs += ' --overwrite '
            if args.products:
                batchargs += ' -p ' + ' '.join(args.products)

        for extent in extents:
            inv = DataInventory(
                cls, extent,
                TemporalExtent(args.dates, args.days), **vars(args)
            )
            if args.batchout:
                tdl = reduce(
                    list.__add__,
                    map(
                        lambda tiles: [
                            args.command + ' -t ' + str(tile) +
                            ' -d ' + str(tiles.date) + ' ' +
                            batchargs + '\n'
                            for tile in tiles.tiles.keys()
                        ],
                        inv.data.values(),
                    ),
                    tdl
                )

            else:
                inv.process(overwrite=args.overwrite)
        if args.batchout:
            with open(args.batchout, 'w') as ofile:
                ofile.writelines(tdl)

    except Exception, e:
        import traceback
        VerboseOut(traceback.format_exc(), 4)
        print 'Data processing error: %s' % e
Code example #27
    def fetch(cls, asset, tile, date):

        # 'SR' not fetchable at the moment
        if asset == 'SR':
            VerboseOut('SR assets are never fetchable', 4)
            return
        paths_rows = tile[:3] + "," + tile[3:]
        fdate = date.strftime('%Y-%m-%d')

        s = search.Search()
        response = s.search(paths_rows=paths_rows,
                            start_date=fdate,
                            end_date=fdate,
                            cloud_max=90)
        if response['status'] == 'SUCCESS' and response['total'] > 0:
            VerboseOut('Fetching %s %s %s' % (asset, tile, fdate), 1)
            if response['total_returned'] != 1:
                raise Exception(
                    'Single date, single location, returned more than one result'
                )
            result = response['results'][0]
            cloudpct = result['cloud']
            sceneID = result['sceneID']
            stage_dir = os.path.join(cls.Repository.path(), 'stage')
            sceneIDs = [str(sceneID)]
            d = downloader.Downloader(download_dir=stage_dir)
            d.download(sceneIDs)
            # do the following because the downloaded .bz file has owner/group
            # settings that cause the GDAL virtual filesystem access to fail
            bz_path = glob.glob(os.path.join(stage_dir, sceneID + '*'))[0]
            gz_path = os.path.splitext(bz_path)[0] + ".gz"
            cmd = "tar xvfj %s -C %s |xargs tar cvfz %s -C %s" % (
                bz_path, stage_dir, gz_path, stage_dir)
            VerboseOut("Reformatting bz->gz", 1)
            result = commands.getstatusoutput(cmd)
            VerboseOut("removing %s" % bz_path)
            bands_path = glob.glob(os.path.join(stage_dir, sceneID + '_*.*'))
            # clean up - the .tar.gz will get moved on archive
            os.remove(bz_path)
            for band_path in bands_path:
                os.remove(band_path)
Code example #28
File: merra.py Project: ircwaves/gips
    def fetch(cls, asset, tile, date):
        """ Get this asset for this tile and date (using OpenDap service) """
        #super(MerraAsset, cls).fetch(asset, tile, date)

        if cls._assets[asset]['latency'] is None:
            # no latency defined: only the fixed 1980-01-01 snapshot exists
            assert date == datetime.datetime(1980, 1, 1)
        elif date > (datetime.datetime.now() - datetime.timedelta(cls._assets[asset]['latency'])):
            # elif, not a second if: timedelta(None) would raise a TypeError
            print "These data are not available for specified dates."
            return None

        try:
            dataset = cls.opendap_fetch(asset, date)
        except Exception:
            print "Fetch: data not available", asset, tile, date
            return

        # Find the bounds of the tile requested
        bounds = cls.Repository.tile_bounds(tile)
        # TODO - get origin from shapefile
        ORIGIN = (-180., -90.)
        dx = bounds[2] - bounds[0]
        dy = bounds[3] - bounds[1]
        xsize = int(round(dx / cls._defaultresolution[0]))
        ysize = int(round(dy / cls._defaultresolution[1]))

        ix0 = int(round((bounds[0] - ORIGIN[0]) / cls._defaultresolution[0]))
        iy0 = int(round((bounds[1] - ORIGIN[1]) / cls._defaultresolution[1]))
        ix1 = ix0 + xsize
        iy1 = iy0 + ysize

        VerboseOut('Retrieving data for bounds (%s, %s) - (%s, %s)' % (bounds[0], bounds[1], bounds[2], bounds[3]), 3)

        data = dataset[asset][:, iy0:iy1, ix0:ix1].astype('float32')
        data = data[:, ::-1, :]

        # Save tile data
        description = cls._assets[asset]['description']
        meta = {'ASSET': asset, 'TILE': tile, 'DATE': str(date.date()), 'DESCRIPTION': description}
        doy = date.strftime('%j')
        fout = os.path.join(cls.Repository.path('stage'), "MERRA_%s_%s_%4d%s.tif" % (asset, tile, date.year, doy))

        # TODO: use gippy instead
        proj = raster.create_proj(4326)
        geo = (bounds[0], cls._defaultresolution[0], 0.0, bounds[3], 0.0, -cls._defaultresolution[1])
        print "writing", fout
        raster.write_raster(fout, data, proj, geo, meta, bandnames=cls._assets[asset]['bandnames'])
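The bounds-to-array-index conversion is plain grid arithmetic; a sketch with hypothetical numbers (a 0.5-degree grid anchored at the (-180, -90) origin used above):

ORIGIN = (-180., -90.)
res = (0.5, 0.5)                       # hypothetical _defaultresolution
bounds = (-72.0, 42.0, -70.0, 44.0)    # xmin, ymin, xmax, ymax
xsize = int(round((bounds[2] - bounds[0]) / res[0]))   # 4 columns
ysize = int(round((bounds[3] - bounds[1]) / res[1]))   # 4 rows
ix0 = int(round((bounds[0] - ORIGIN[0]) / res[0]))     # 216
iy0 = int(round((bounds[1] - ORIGIN[1]) / res[1]))     # 264
print((ix0, ix0 + xsize, iy0, iy0 + ysize))            # the slice limits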
Code example #29
File: prism.py Project: ircwaves/gips
    def fetch_ftp(cls, asset, tile, date):
        """ Fetch via FTP """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" %
                            (cls.__name__, asset))
        VerboseOut('%s: fetch tile %s for %s' % (asset, tile, date), 3)
        if url.startswith('ftp://'):
            #drop ftp:// if given
            url = url[6:]
        ftpurl = url.split('/')[0]
        ftpdir = url[len(ftpurl):]
        try:
            ftp = ftplib.FTP(ftpurl)
            ftp.login('anonymous', settings().EMAIL)
            pth = os.path.join(ftpdir, date.strftime('%Y'))
            ftp.set_pasv(True)
            ftp.cwd(pth)

            filenames = []
            ftp.retrlines('LIST', filenames.append)
            filenames = map(lambda x: x.split(' ')[-1], filenames)
            filenames = filter(lambda x: date.strftime('%Y%m%d') in x,
                               filenames)
            if len(filenames) > 1:
                filenames = sorted(filenames,
                                   key=lambda x: prismAsset(x).ver_stab,
                                   reverse=True)
            filename = filenames[0]
            stagedir = tempfile.mkdtemp(prefix='prismDownloader',
                                        dir=cls.Repository.path('stage'))
            ofilename = os.path.join(stagedir, filename)
            VerboseOut("Downloading %s" % filename, 2)
            with open(ofilename, "wb") as ofile:
                ftp.retrbinary('RETR %s' % filename, ofile.write)
            ftp.close()
        except Exception, e:
            raise Exception("Error downloading: %s" % e)
Code example #30
    def process(self, *args, **kwargs):
        """Produce requested products."""
        products = super(gpmData, self).process(*args, **kwargs)
        if len(products) == 0:
            return
        # example products.requested:
        # {'temp8tn': ['temp8tn'], 'clouds': ['clouds'], . . . }
        # key is only used once far below, and val is only used for val[0].
        for key, val in products.requested.items():
            start = datetime.datetime.now()
            prod_type = val[0]
            asset, missingassets, availassets, allsds = \
                self.asset_check(prod_type)

            if not availassets:
                # some products aren't available for every day but this is trying every day
                VerboseOut(
                    'There are no available assets (%s) on %s for tile %s' % (
                        str(missingassets),
                        str(self.date),
                        str(self.id),
                    ), 5)
                continue

            sensor = self._products[prod_type]['sensor']
            fname = self.temp_product_filename(
                sensor, prod_type)  # moved to archive at end of loop

            img = gippy.GeoImage(allsds)

            imgdata = img.Read()
            imgout = gippy.GeoImage(fname, img.XSize(), img.YSize(), 1,
                                    gippy.GDT_Float32)
            del img
            imgout.SetNoData(29999.0)
            imgout.SetOffset(0.0)
            imgout.SetGain(0.1)
            imgout.SetBandName('PrecipitationRate', 1)
            imgout.SetProjection(self._projection)
            imgout.SetAffine(
                np.array(self._products[prod_type]['_geotransform']))
            imgout[0].Write(imgdata)
            # add product to inventory
            archive_fp = self.archive_temp_path(fname)
            self.AddFile(sensor, key, archive_fp)
            del imgout  # to cover for GDAL's internal problems
            utils.verbose_out(' -> {}: processed in {}'.format(
                os.path.basename(fname),
                datetime.datetime.now() - start),
                              level=1)