Code example #1
0
    def create(cls,
               impact_type,
               country,
               valid_from_date,
               valid_to_date,
               vampire_defaults=None):
        """Instantiate the impact product registered under impact_type.

        Looks up the subclass registered for the given impact name and
        constructs it with the supplied initialisation values.

        :param impact_type: Name of a registered impact product.
        :type impact_type: string
        :param country: Country the impact product is created for.
        :type country: string
        :param valid_from_date: Date the product impact is valid from.
        :type valid_from_date: datetime
        :param valid_to_date: Date the product impact is valid until.
        :param vampire_defaults: Optional pre-built defaults; a fresh
            VampireDefaults instance is created when omitted.
        :type vampire_defaults: VampireDefaults
        :return: Instance of the registered BaseImpactProduct subclass.
        :raises ValueError: If impact_type is not a registered subclass.
        """
        if impact_type not in cls.subclasses:
            raise ValueError('Bad impact type {}'.format(impact_type))
        defaults = vampire_defaults
        if defaults is None:
            defaults = VampireDefaults.VampireDefaults()
        impact_cls = cls.subclasses[impact_type]
        return impact_cls(country, valid_from_date, valid_to_date, defaults)
Code example #2
0
    def create(cls, server_type, vampire_defaults=None):
        """Instantiate the server manager registered under server_type.

        :param server_type: Name of a registered server subclass.
        :param vampire_defaults: Optional pre-built defaults; a fresh
            VampireDefaults instance is created when omitted.
        :return: Instance of the registered server subclass.
        :raises ValueError: If server_type is not a registered subclass.
        """
        if server_type not in cls.subclasses:
            raise ValueError('Bad server type {}'.format(server_type))
        defaults = VampireDefaults.VampireDefaults() if vampire_defaults is None else vampire_defaults
        server_cls = cls.subclasses[server_type]
        return server_cls(defaults)
Code example #3
0
    def create(cls, process_type, params, vampire_defaults=None):
        """Instantiate the process registered under process_type.

        :param process_type: Name of a registered process subclass.
        :param params: Parameters forwarded to the process constructor.
        :param vampire_defaults: Optional pre-built defaults; a fresh
            VampireDefaults instance is created when omitted.
        :return: Instance of the registered process subclass.
        :raises ValueError: If process_type is not a registered subclass.
        """
        if process_type not in cls.subclasses:
            raise ValueError('Bad process type {}'.format(process_type))
        defaults = VampireDefaults.VampireDefaults() if vampire_defaults is None else vampire_defaults
        process_cls = cls.subclasses[process_type]
        return process_cls(params, defaults)
def subsetByCountry(country):
    """Clip global CHIRPS-derived product rasters to *country* and register
    the new clips in the country's mosaic datasets.

    For each product, compares the country folder against the global folder;
    any global raster not yet present for the country is clipped with
    ExtractByMask against the country's subset mask, saved into the country
    folder, and added to the product's mosaic dataset (with its date field
    updated).

    :param country: Country code used for folder/geodatabase naming and
        config lookups (e.g. the 'CHIRPS_subset' key).
    """
    #LOG_FILENAME1 = os.path.join("D:\\PyCharm Projects\\SingleProject\\log", 'vampire_log_'+country+'_'+time.strftime("%Y%m%d")+'.log')
    #logging.basicConfig(filename=LOG_FILENAME1,level=logging.DEBUG)
    datelog = time.strftime('%c')  # timestamp prefix for log messages
    vp = VampireDefaults.VampireDefaults()
    # mask polygon/raster used to clip the global data to this country
    subsetFile = vp.get('CHIRPS_subset', country)
    # 'geodatabase'/'config' holds a python-dict literal; parse it safely
    geodatabase = vp.get('geodatabase', 'config')
    config = ast.literal_eval(geodatabase)
    file_path = config['gdbpath']
    directory = file_path + '\\' + country
    folderglobal = 'D:\\IDN_GIS\\01_Data\\01_Global\\VampireData'
    folder = 'D:\\IDN_GIS\\01_Data\\03_Regional\\' + country
    products = [
        'rainfall_anomaly_1_month', 'rainfall_anomaly_3_month',
        'rainfall_anomaly_dekad', 'spi_1_month', 'spi_3_month', 'spi_dekad'
    ]
    for product in products:
        productFolder = folder + "\\" + product
        countryData = []
        globalproductfolder = folderglobal + '\\' + 'global_' + product
        globaldata = []
        gdbname = directory + '\\' + product + '.gdb'
        # MDS = path of the mosaic dataset inside the product geodatabase
        MDS = gdbname + '\\' + product
        # expected country-level names: global names with a country prefix
        for gdata in os.listdir(globalproductfolder):
            if gdata.endswith(".tif") or gdata.endswith(".tiff"):
                NameConverting = country + '_cli_' + gdata
                globaldata.append(NameConverting)
        # rasters already clipped for this country
        for data in os.listdir(productFolder):
            if data.endswith(".tif") or data.endswith(".tiff"):
                countryData.append(data)
        for i in globaldata:
            if i not in countryData:
                # recover the original global filename by stripping the
                # '<country>_cli_' prefix pieces added above
                globalname1 = i.split("_")
                globalname1.remove(country)
                globalname1.remove('cli')
                globalname = "_".join(globalname1)
                gdataloc = os.path.join(globalproductfolder, globalname)
                # Spatial Analyst licence needed for ExtractByMask
                arcpy.CheckOutExtension("spatial")
                extractbymask = ExtractByMask(gdataloc, subsetFile)
                extractbymask.save(os.path.join(productFolder, i))
                #logging.debug(datelog+" :updating " + product, globalname, gdbname, MDS)
                # register the new clip in the mosaic dataset and stamp dates
                mosaicDataset.addRastertoMDS(MDS, productFolder)
                #mosaicDataset.addDateField(MDS)
                mosaicDataset.updateDateField(MDS, product)
                arcpy.CheckInExtension("spatial")
            else:
                logging.debug(datelog + " : " + i + " is available")
                print("data available")
        print("updating date")
Code example #5
0
 def __init__(self, gis_server_type):
     """Select and construct the GIS server backend named in configuration.

     :param gis_server_type: Server type requested by the caller; stored
         on the instance as ``server_type``.
     """
     self.server_type = gis_server_type
     # load default values from .ini file
     self.vampire = VampireDefaults.VampireDefaults()
     # BUG FIX: the original compared the bound method '.lower' (no call
     # parentheses) to a string, which is always False, so the configured
     # backend was never selected. Call lower() and compare the result.
     _server = self.vampire.get('vampire', 'gis_server').lower()
     if _server == 'arcgis':
         try:
             self.gis_server = ArcGISServerManager.ArcGISServerManager()
         except ImportError:
             # print as a function call: valid on both Python 2 and 3
             print('Missing libraries for ArcPy. These are required for GIS Server type "arcgis"')
             self.gis_server = None
     elif _server == 'geoserver':
         self.gis_server = GeoserverManager.GeoserverManager()
     else:
         # unknown/unset server type: no backend available
         self.gis_server = None
     return
Code example #6
0
def update_mosaic_vhi_crop(country):
    """Register new VHI-crop rasters for *country* in its mosaic dataset and
    refresh the dataset's date field.

    :param country: Country code used for folder/geodatabase naming.
    """
    vp = VampireDefaults.VampireDefaults()
    # 'geodatabase'/'config' holds a python-dict literal; parse it safely
    geodatabase = vp.get('geodatabase', 'config')
    config = ast.literal_eval(geodatabase)
    file_path = config['gdbpath']
    directory = file_path + '\\' + country
    folder = 'D:\\IDN_GIS\\01_Data\\03_Regional\\' + country
    products = ['vhi_crop_1_month']
    # CLEANUP: removed unused locals 'datelog' and 'countryData' from the
    # original; nothing in this function read them.
    for product in products:
        productFolder = folder + "\\" + product
        gdbname = directory + '\\' + product + '.gdb'
        # MDS = path of the mosaic dataset inside the product geodatabase
        MDS = gdbname + '\\' + product
        mosaicDataset.addRastertoMDS(MDS, productFolder)
        #mosaicDataset.addDateField(MDS)
        mosaicDataset.updateDateField(MDS, product)
        # NOTE(review): CheckInExtension without a matching CheckOutExtension;
        # harmless only if the caller checked the licence out -- confirm.
        arcpy.CheckInExtension("spatial")
        print("updating date")
Code example #7
0
def subsetdslr(countrycode, threshold):
    """Clip global days-since-last-rain (DSLR) rasters to *countrycode* and
    register the clips in the country's DSLR mosaic dataset.

    :param countrycode: Country code used for folder naming, file prefixes
        and the 'IMERG_subset' config lookup.
    :param threshold: Rainfall threshold (mm) selecting which DSLR product
        folders and geodatabase to use.
    """
    # BUG FIX: 'datelog' was referenced but never assigned in this function,
    # raising NameError on the first logging call. Define it here the same
    # way the sibling subset functions do.
    datelog = time.strftime('%c')
    vp = VampireDefaults.VampireDefaults()
    # mask used to clip the global data to this country
    subsetFile = vp.get('IMERG_subset', countrycode)
    logging.debug(datelog + " : start subset data for country " + countrycode +
                  " on " + str(threshold) + " threshold--------")
    # hoist the zero-padded threshold used in several paths below
    _thr = str(threshold).zfill(2)
    globalfolder = "D:\\IDN_GIS\\01_Data\\01_Global\\VampireData\\global_dslr_" + str(
        threshold) + "mm"
    countryfolder = ("D:\\IDN_GIS\\01_Data\\03_Regional\\" + countrycode +
                     "\\DSLR_" + _thr + "mm")
    # path of the mosaic dataset inside the DSLR geodatabase
    MDS = ("D:\\IDN_GIS\\05_Analysis\\04_Geodatabases\\idn\\DSLR_" + _thr +
           "mm.gdb\\DSLR_" + _thr + "mm")
    global_data = []
    country_data = []
    # expected country-level names: global names with a country prefix
    for globaldata in os.listdir(globalfolder):
        if globaldata.endswith((".tif", ".tiff")):
            global_data.append(countrycode + "_cli_" + globaldata)
    # rasters already clipped for this country
    for countrydata in os.listdir(countryfolder):
        if countrydata.endswith((".tif", ".tiff")):
            country_data.append(countrydata)
    for i in global_data:
        if i not in country_data:
            print(datelog + " : file DSLR " + i + " for country " +
                  countrycode + " is not available")
            logging.debug(datelog + " : Cropping DSLR data.......")
            # recover the original global filename by stripping the
            # '<countrycode>_cli_' prefix pieces added above
            globalname1 = i.split("_")
            globalname1.remove(countrycode)
            globalname1.remove('cli')
            globalname = "_".join(globalname1)
            globaldataloc = os.path.join(globalfolder, globalname)
            # Spatial Analyst licence needed for ExtractByMask
            arcpy.CheckOutExtension("spatial")
            extractbymask = ExtractByMask(globaldataloc, subsetFile)
            extractbymask.save(os.path.join(countryfolder, i))
            logging.debug(datelog + " : Cropping DSLR data finished. File " +
                          i + "is created.......")
            arcpy.CheckInExtension("spatial")
    # register everything new in the mosaic dataset and stamp the dates
    mosaicDataset.addRastertoMDS(MDS, countryfolder)
    mosaicDataset.addDateField(MDS)
    mosaicDataset.updateDateField(MDS, "DSLR_" + _thr + "mm")
import os
import VampireDefaults
import re
from arcpy.sa import *
import arcpy

# project-wide configuration defaults loaded from the Vampire .ini file
vp = VampireDefaults.VampireDefaults()
# NOTE(review): 'dir' shadows the builtin of the same name; left unchanged
# because other code in this module (e.g. dekadLT) reads this name.
dir = ['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12']
# three-month season codes (e.g. '010203' = Jan-Feb-Mar), wrapping at year end
dirseasonal = [
    '010203', '020304', '030405', '040506', '050607', '060708', '070809',
    '080910', '091011', '101112', '111201', '120102'
]
# dekad index within a month (three ten-day periods)
dekad = ['1', '2', '3']

#====================== change following data before running the script ==========================#
datadir = 'D:\\IDN_GIS\\01_Data\\01_Global\\Rasters\\Climate\\Precipitation\\CHIRPS\\Seasonal\\TifData'
stddir = 'D:\\IDN_GIS\\01_Data\\01_Global\\Rasters\\Climate\\Precipitation\\CHIRPS\\Seasonal\\NewStatististics'
dekad_pattern = vp.get(
    'CHIRPS', 'global_seasonal_pattern'
)  # global_monthly_pattern, global_seasonal_pattern, global_dekad_pattern

# filename-matching regex, compiled once at module load
Moregex_dekad = re.compile(dekad_pattern)


#===================================create list file of DEKAD============================#
def dekadLT():
    """Build a per-dekad lookup keyed by month+dekad index (e.g. '011').

    NOTE(review): this function appears truncated in this view -- the
    'dictionary' and 'content' accumulators are created but never populated
    or returned here; confirm against the full source.
    """
    dictionary = {}
    for i in dir:
        for j in dekad:
            index = i + j
            content = []
Code example #9
0
 def __init__(self):
     """Initialise the instance with default values loaded from the .ini file."""
     self.vampire = VampireDefaults.VampireDefaults()
def downloadCHIRPSData(interval, output_dir, tiffolder,
                       start_date=None, end_date=None, dates=None,
                       overwrite=False):
    """Download CHIRPS rainfall archives from the configured FTP server and
    unpack them as GeoTIFFs.

    GENERALISATION: the original hard-coded start_date/end_date/dates/overwrite
    as locals fixed to None/False; they are now backward-compatible keyword
    parameters with the same defaults.

    :param interval: CHIRPS product interval ('daily', 'dekad', 'monthly' or
        'seasonal'); matched case-insensitively.
    :param output_dir: Directory the downloaded .gz archives are saved to
        (created if missing).
    :param tiffolder: Directory the decompressed GeoTIFFs are written to.
    :param start_date: Optional earliest date (datetime) to download.
    :param end_date: Optional latest date (datetime) to download.
    :param dates: Optional list of 'YYYY-MM' strings; when given, only files
        for those year-month pairs are downloaded.
    :param overwrite: Re-download and re-extract files that already exist.
    :raises ValueError: If the filename pattern matches but the interval is
        not one of the recognised values.
    """
    _interval = interval.lower()
    vampire = VampireDefaults.VampireDefaults()
    _ftp_dir = vampire.get('CHIRPS', 'ftp_dir_{0}'.format(_interval))
    files_list = []
    all_files = []
    if not os.path.exists(output_dir):
        # output directory does not exist, create it first
        os.makedirs(output_dir)
    with ftputil.FTPHost(vampire.get('CHIRPS', 'ftp_address'),
                         vampire.get('CHIRPS', 'ftp_user'),
                         vampire.get('CHIRPS', 'ftp_password')) as ftp_host:
        ftp_host.chdir(_ftp_dir)
        if _interval == 'daily':
            # daily files live in one directory per year, so build the list of
            # years to visit from the date constraints, then walk each one.
            _years = []
            _ftp_years = ftp_host.listdir(ftp_host.curdir)
            if start_date is not None:
                if end_date is not None:
                    # BUG FIX: the original appended 'start_date.year + i' with
                    # i itself ranging over year numbers, producing years far in
                    # the future and omitting end_date.year. Use an inclusive
                    # range of the actual years instead.
                    for _y in range(start_date.year, end_date.year + 1):
                        _years.append(_y)
                else:
                    # have start date but no end date: every year from start on
                    for fd in _ftp_years:
                        if int(fd) >= start_date.year:
                            _years.append(int(fd))
            else:
                # no start date
                if end_date is not None:
                    # have end date but no start date: all years until end_date
                    for fd in _ftp_years:
                        if int(fd) <= end_date.year:
                            _years.append(int(fd))
                else:
                    # no start or end date.
                    if dates:
                        # explicit 'YYYY-MM' list: collect the years mentioned
                        for d in dates:
                            _years.append(int(d.split('-')[0]))
            _years = set(_years)
            for y in _years:
                ftp_host.chdir(ftp_host.path.join(_ftp_dir, str(y)))
                _files = ftp_host.listdir(ftp_host.curdir)
                if _files is not None:
                    for f in _files:
                        _f_abs = ftp_host.path.join(ftp_host.getcwd(), f)
                        all_files.append(_f_abs)
        else:
            all_files = ftp_host.listdir(ftp_host.curdir)
        # CONSISTENCY FIX: use the lower-cased interval for the pattern lookup,
        # matching the 'ftp_dir_*' lookup above.
        regex = re.compile(vampire.get('CHIRPS', 'global_{0}_pattern'.format(_interval)))
        for f in all_files:
            download = False
            result = regex.match(os.path.basename(f))
            f_date = None
            if result is not None:
                if _interval == 'monthly' or _interval == 'dekad' or _interval == 'daily':
                    f_year = result.group('year')
                    f_month = result.group('month')
                    f_day = 1
                    if _interval == 'daily':
                        f_day = result.group('day')
                    f_date = datetime.datetime(int(f_year), int(f_month), int(f_day))
                elif _interval == 'seasonal':
                    f_year = result.group('year')
                    # season code like '010203'; first two digits = first month
                    f_month = result.group('season')[0:2]
                    f_date = datetime.datetime(int(f_year), int(f_month), 1)
                else:
                    # call-form raise works on both Python 2 and 3 (the
                    # original 'raise ValueError, ...' is Python-2-only)
                    raise ValueError("Interval not recognised.")
                if dates:
                    if '{0}-{1}'.format(f_year, f_month) in dates:
                        download = True
                elif (start_date is None) and (end_date is None):
                    download = True
                elif start_date is None:
                    # have end_date, check date is before
                    if f_date is not None:
                        if f_date <= end_date:
                            download = True
                elif end_date is None:
                    # have start_date, check date is after
                    if f_date is not None:
                        if f_date >= start_date:
                            download = True
                else:
                    # have both start and end date
                    if f_date is not None:
                        if start_date <= f_date <= end_date:
                            download = True
                if download:
                    # CHIRPS coverage starts in 1981; skip anything earlier
                    if int(f_year) > 1980:
                        if ftp_host.path.isfile(f):
                            local_f = os.path.join(output_dir, os.path.basename(f))
                            tifdata = os.path.join(tiffolder, os.path.basename(f))
                            if not os.path.isfile(local_f) or overwrite:
                                # NOTE(review): 'datelog' is not defined in this
                                # function; it must exist at module level or these
                                # logging calls raise NameError -- confirm.
                                logging.debug(datelog+": downloading data "+local_f)
                                ftp_host.download(f, local_f)  # remote, local
                                files_list.append(os.path.basename(f))
                                # archives are gzip-compressed GeoTIFFs; unpack
                                # next to the intended .tif path (extension
                                # stripped from the archive name)
                                with gzip.open(local_f, 'rb') as _in_file:
                                    s = _in_file.read()
                                _path_to_store = os.path.splitext(tifdata)[0]
                                print(_path_to_store)
                                if not os.path.isfile(_path_to_store) or overwrite:
                                    with open(_path_to_store, 'wb') as _out_file:
                                        _out_file.write(s)
                            else:
                                logging.debug(datelog+": "+f+" is available")

        logging.debug(datelog+": Download CHIRPS Data finished..")
        logging.debug(datelog+": Continue checking on rainfall anomaly and standard precipitattion index data..")