Example No. 1
    def getDataPoint(self, oDataGeo):

        # -------------------------------------------------------------------------------------
        # Info start
        oLogStream.info(' ---> Get point data ... ')

        # Method to check class input
        bFileName = self.__checkInput()

        # Read points file
        if bFileName and isfile(self.__sFileName_SHP):
            oDataPoint_SOURCE = self.__readPointInfo()
        else:
            oDataPoint_SOURCE = None
            Exc.getExc(
                ' =====> ERROR: filename is undefined! (' +
                self.__sFileName_SHP + ')', 1, 1)

        # Save point information
        oDataPoint_OUTCOME = self.__savePointInfo(oDataPoint_SOURCE, oDataGeo)

        # Info end
        oLogStream.info(' ---> Get point data ... OK')

        # Return variable(s)
        return oDataPoint_OUTCOME
Example No. 2
    def __getTimeArg(self, sTimeArgFormat='%Y-%m-%d %H:%M'):

        oLogStream.info(' ----> Configure time argument ... ')
        oTimeArg = None
        try:
            if self.sTimeArg is None:
                oLogStream.info(
                    ' -----> Time argument is not set. Time will be taken using time library.'
                )
                self.sTimeArg = time.strftime(sTimeArgFormat, time.gmtime())
            else:
                oLogStream.info(
                    ' -----> Time argument is set using script arg(s)')

            oTimeArg = pd.to_datetime(self.sTimeArg, format=sTimeArgFormat)
            oTimeArg = oTimeArg.floor('min')
            oTimeArg = oTimeArg.replace(minute=0)

            self.sTimeArg = oTimeArg.strftime(sTimeFormat)

            oLogStream.info(' ----> Configure time argument ... DONE [' +
                            self.sTimeArg + ']')

        except BaseException:
            Exc.getExc(
                ' =====> ERROR: time argument definition failed! Check your data and settings!',
                1, 1)

        return oTimeArg
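
The pattern worth noting here is the pandas round-trip: parse the string, floor to the minute, then zero the minutes to land on the start of the hour. A minimal standalone sketch of that step (the function name parse_time_arg and the sample timestamp are invented for illustration):

import time
import pandas as pd

def parse_time_arg(time_arg=None, time_format='%Y-%m-%d %H:%M'):
    # Fall back to the current GMT time when no argument is given
    if time_arg is None:
        time_arg = time.strftime(time_format, time.gmtime())
    # Parse, floor to the minute and zero the minutes to land on the hour
    time_obj = pd.to_datetime(time_arg, format=time_format)
    return time_obj.floor('min').replace(minute=0)

print(parse_time_arg('2021-03-15 10:47'))   # -> 2021-03-15 10:00:00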
Example No. 3
def readArcGrid(oFile):

    # Check method
    try:

        # Read Header
        a1oVarHeader = {
            "ncols": int(oFile.readline().split()[1]),
            "nrows": int(oFile.readline().split()[1]),
            "xllcorner": float(oFile.readline().split()[1]),
            "yllcorner": float(oFile.readline().split()[1]),
            "cellsize": float(oFile.readline().split()[1]),
            "NODATA_value": float(oFile.readline().split()[1]),
        }

        # Read grid values
        a2dVarData = np.loadtxt(oFile, skiprows=0)

        # Debugging
        # plt.figure(1)
        # plt.imshow(a2dVarData); plt.colorbar();
        # plt.show()

        return a2dVarData, a1oVarHeader

    except RuntimeError:
        # Exit status with error
        Exc.getExc(
            ' =====> ERROR: in readArcGrid function (Lib_Data_IO_Ascii)', 1, 1)
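
The six header lines are read one by one and the remaining lines go straight to numpy. A self-contained sketch that feeds the same parsing logic from an in-memory buffer, so it runs without a real file; the grid values are synthetic:

import io
import numpy as np

ascii_grid = io.StringIO(
    "ncols 3\nnrows 2\nxllcorner 0.0\nyllcorner 0.0\ncellsize 1.0\nNODATA_value -9999\n"
    "1 2 3\n4 5 6\n")

# Same six-line ArcGrid header convention assumed by readArcGrid above
header = {
    "ncols": int(ascii_grid.readline().split()[1]),
    "nrows": int(ascii_grid.readline().split()[1]),
    "xllcorner": float(ascii_grid.readline().split()[1]),
    "yllcorner": float(ascii_grid.readline().split()[1]),
    "cellsize": float(ascii_grid.readline().split()[1]),
    "NODATA_value": float(ascii_grid.readline().split()[1]),
}
data = np.loadtxt(ascii_grid)          # remaining lines are the grid values
print(header["ncols"], data.shape)     # -> 3 (2, 3)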
Example No. 4
def removeExtZip(sFileName_ZIP, sZipExt=''):

    # Check null zip extension
    if sZipExt is None:
        sZipExt = ''

    # Check zip extension format
    if sZipExt != '':
        # Check zip extension format in selected mode
        [sZipExt, bZipExt] = checkExtZip(sZipExt)
    else:
        # Check zip extension format in default mode
        sFileName_UNZIP, sZipExt = os.path.splitext(sFileName_ZIP)
        # Check zip extension format
        [sZipExt, bZipExt] = checkExtZip(sZipExt)

    # Create zip filename
    sFileName_UNZIP = ''
    if bZipExt is True:
        sFileName_UNZIP = sFileName_ZIP.split(sZipExt)[0]
        if sFileName_UNZIP.endswith('.'):
            sFileName_UNZIP = sFileName_UNZIP[0:-1]
        else:
            pass

    elif bZipExt is False:
        Exc.getExc(" =====> WARNING: sZipExt selected is not known extension! Add in zip dictionary if necessary!", 2, 1)
        [sFileName_UNZIP, sZipExt] = os.path.splitext(sFileName_ZIP)
    else:
        Exc.getExc(" =====> ERROR: error in selection sZipExt extension! Check in zip dictionary!", 1, 1)

    return sFileName_UNZIP, sZipExt
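
The extension handling above relies on checkExtZip, which is not shown here; the sketch below captures the same idea with a hypothetical set of known zip extensions standing in for that dictionary:

import os

KNOWN_ZIP_EXT = {'gz', 'bz2', '7z', 'zip'}      # hypothetical stand-in for the zip dictionary

def remove_zip_ext(filename_zip):
    # Split the last extension and strip it only if it is a known zip extension
    filename_unzip, zip_ext = os.path.splitext(filename_zip)    # e.g. ('rain.txt', '.gz')
    if zip_ext.lstrip('.') in KNOWN_ZIP_EXT:
        return filename_unzip, zip_ext
    return filename_zip, ''    # unknown extension: leave the filename untouched

print(remove_zip_ext('rain_20210315.txt.gz'))   # -> ('rain_20210315.txt', '.gz')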
Example No. 5
def checkFileZip(sFileName_IN, sZipExt='NoZip'):

    # Define zip extension value (to overwrite None assignment)
    if sZipExt is None:
        sZipExt = 'NoZip'

    # Check if string starts with point
    if sZipExt.startswith('.'):
        sZipExt = sZipExt[1:]
    else:
        pass

    # Check if zip extension is activated
    if not (sZipExt == 'NoZip' or sZipExt == ''):
        # Check zip extension format
        [sZipExt, bZipExt] = checkExtZip(sZipExt)

        if bZipExt:
            if sZipExt in sFileName_IN:
                sFileName_OUT = sFileName_IN
            else:
                sFileName_OUT = sFileName_IN + '.' + sZipExt
        else:
            Exc.getExc(" =====> WARNING: sZipExt selected is not known extension! Add in zip dictionary if necessary!", 2, 1)
            sFileName_OUT = sFileName_IN
    else:
        sFileName_OUT = sFileName_IN
    return sFileName_OUT
Example No. 6
def findVarName(oVarList, sVarPattern='rain_$ensemble', oVarTags=None):

    if oVarTags is None:
        oVarTags = ['$period', '$ensemble']

    iVarTags_N = oVarTags.__len__()
    iVarPattern_N = sVarPattern.count('$')

    if iVarPattern_N > iVarTags_N:
        Exc.getExc(
            ' =====> WARNING: in finding variable name(s), the number of $ patterns'
            ' is greater than the number of defined tags!', 2, 1)

    sVarDefined = deepcopy(sVarPattern)
    for sVarTag in oVarTags:
        sVarDefined = sVarDefined.replace(sVarTag, '*')

    oVarList_FILTERED = fnmatch.filter(oVarList, sVarDefined)

    if isinstance(oVarList_FILTERED, str):
        oVarList_FILTERED = [oVarList_FILTERED]

    if oVarList_FILTERED.__len__() == 0:
        #Exc.getExc(' =====> ERROR: mismatch between group variable(s) and variable pattern! Check your settings!', 1, 1)
        oVarList_FILTERED = None

        Exc.getExc(
            ' =====> WARNING: in finding variable name(s), method returns NONE for variable '
            + sVarPattern + ' !', 2, 1)

    return oVarList_FILTERED
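
The matching works by turning each $tag into a wildcard and filtering the variable list with fnmatch. A standalone sketch with an invented variable list:

import fnmatch

var_list = ['rain_ens001', 'rain_ens002', 'air_temperature']
pattern = 'rain_$ensemble'.replace('$ensemble', '*')   # -> 'rain_*'

print(fnmatch.filter(var_list, pattern))               # -> ['rain_ens001', 'rain_ens002']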
Example No. 7
def addExtZip(sFileName_UNZIP, sZipExt=''):

    if sZipExt is None:
        sZipExt = ''

    if sZipExt != '':
        # Remove dot as starting character
        if sZipExt.startswith('.'):
            sZipExt = sZipExt[1:]

        # Check zip extension format
        [sZipExt, bZipExt] = checkExtZip(sZipExt)

        # Create zip filename
        sFileName_ZIP = ''
        if bZipExt is True:
            sFileName_ZIP = ''.join([sFileName_UNZIP, '.', sZipExt])
        elif bZipExt is False:
            Exc.getExc(" =====> WARNING: selected zip extension is unknown! Add in zip dictionary if necessary!", 2, 1)
            sFileName_ZIP = ''.join([sFileName_UNZIP, '.', sZipExt])
        else:
            Exc.getExc(" =====> ERROR: error in selecting zip extension! Check in zip dictionary!", 1, 1)

    else:
        sFileName_ZIP = sFileName_UNZIP

    return sFileName_ZIP, sZipExt
Example No. 8
def appendFilePickle(sFileName, oFileData, sFileKey=None, oFileProtocol=-1):
    with open(sFileName, 'rb') as oFileHandle:
        oFileData_TMP = pickle.load(oFileHandle)

    oFileDSet_STORE_UPD = {}
    if isinstance(oFileData, dict) and isinstance(oFileData_TMP, dict):
        oFileData_STORED_ALL = oFileData_TMP.copy()

        if sFileKey in oFileData_STORED_ALL:
            oFileDSet_STORED_VAR = oFileData_STORED_ALL[sFileKey]
            oFileDSet = oFileData[sFileKey]

            try:
                oFileDSet_STORE_MERGE = oFileDSet_STORED_VAR.merge(oFileDSet)
                oFileDSet_STORE_UPD[sFileKey] = oFileDSet_STORE_MERGE
            except BaseException:

                Exc.getExc(
                    ' =====> WARNING: issue in merging datasets! Try to correct Longitude/Latitude coords!',
                    2, 1)

                for sVarName in list(oFileDSet.data_vars):
                    a3dVarData = oFileDSet[sVarName].values
                    oVarTime = pd.to_datetime(oFileDSet['time'].values)
                    a2dVarGeoY = oFileDSet_STORED_VAR['Latitude'].values
                    a2dVarGeoX = oFileDSet_STORED_VAR['Longitude'].values

                    oVarAttributes = oFileDSet[sVarName].attrs
                    oVarEncoding = oFileDSet[sVarName].encoding

                    oFileDSet_CORRECTED = xr.Dataset(
                        {
                            sVarName:
                            (['time', 'south_north', 'west_east'], a3dVarData)
                        },
                        attrs=oVarAttributes,
                        coords={
                            'Latitude': (['south_north',
                                          'west_east'], a2dVarGeoY),
                            'Longitude': (['south_north',
                                           'west_east'], a2dVarGeoX),
                            'time': oVarTime
                        })
                    oFileDSet_CORRECTED = oFileDSet_STORED_VAR.merge(
                        oFileDSet_CORRECTED)
                    oFileDSet_CORRECTED[sVarName].attrs = oVarAttributes
                    oFileDSet_CORRECTED[sVarName].encoding = oVarEncoding

                    # Reload updated datasets for next correction step
                    oFileDSet_STORED_VAR = deepcopy(oFileDSet_CORRECTED)

                oFileDSet_STORE_UPD[sFileKey] = oFileDSet_CORRECTED

        else:
            oFileDSet = oFileData[sFileKey]
            oFileDSet_STORE_UPD[sFileKey] = {}
            oFileDSet_STORE_UPD[sFileKey] = oFileDSet

    with open(sFileName, 'wb') as oFileHandle:
        pickle.dump(oFileDSet_STORE_UPD, oFileHandle, protocol=oFileProtocol)
Example No. 9
def configVarFx(sVarFxName, oVarFxLibrary, kwargs):

    # Get and parser variable
    if hasattr(oVarFxLibrary, sVarFxName):

        # Use an empty dictionary when no keyword argument(s) are given (avoids a NoneType error below)
        if kwargs is not None:
            oFxArgs = kwargs
        else:
            oFxArgs = {}

        oFxObj = getattr(oVarFxLibrary, sVarFxName)
        oFxSign = inspect.signature(oFxObj)

        oFxParams = {}
        for sFxParamsKey, oFxParamValue in oFxSign.parameters.items():
            if sFxParamsKey in list(oFxArgs.keys()):
                oFxParams[sFxParamsKey] = oFxArgs[sFxParamsKey]
            else:
                oFxParams[sFxParamsKey] = oFxParamValue.default

        oDataObj = oFxObj(**oFxParams)

        return oDataObj

    else:
        # Exit for error in defining methods to get data
        Exc.getExc(' =====> ERROR: selected method ' + sVarFxName +
                   ' not available in ' + str(oVarFxLibrary) + ' library', 1, 1)
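
The key idea is the signature-driven call: arguments present in the input dictionary are passed through, everything else falls back to the parameter default. A self-contained sketch of the same pattern (the function compute_rain_volume and its arguments are hypothetical):

import inspect

def compute_rain_volume(rain_mm, area_km2=1.0, scale=1000.0):
    return rain_mm * area_km2 * scale

available_args = {'rain_mm': 12.5, 'area_km2': 3.0}     # 'scale' not provided -> default is used

# Build the call arguments from the signature, as configVarFx does
params = {}
for name, param in inspect.signature(compute_rain_volume).parameters.items():
    params[name] = available_args.get(name, param.default)

print(compute_rain_volume(**params))                    # -> 37500.0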
Example No. 10
def openFile(sFileName, sFileMode):
    try:
        oFile = open(sFileName, sFileMode)
        return oFile
    except IOError as oError:
        Exc.getExc(
            ' =====> ERROR: in open file (Lib_Data_IO_Ascii)' + ' [' +
            str(oError) + ']', 1, 1)
Example No. 11
def openFile(sFileName):
    try:
        oFileData = gpd.read_file(sFileName)
        return oFileData
    except IOError as oError:
        Exc.getExc(
            ' =====> ERROR: in open file (lib_data_io_shapefile)' + ' [' +
            str(oError) + ']', 1, 1)
Example No. 12
    def __checkVar(oVarData_DEF):
        oVarData_CHECK = []
        for sVarData_DEF in oVarData_DEF:
            if sVarData_DEF in oVarData_Valid:
                oVarData_CHECK.append(sVarData_DEF)
            else:
                Exc.getExc(
                    ' =====> WARNING: geographical variable ' + sVarData_DEF +
                    ' is not a valid value. Check your settings!', 2, 1)
        return oVarData_CHECK
Example No. 13
    def __setFileNameIN(self):

        # Define filename IN
        if self.sFileName_IN:
            self.sFilePath_IN = os.path.split(self.sFileName_IN)[0]
            self.sFileName_IN = os.path.split(self.sFileName_IN)[1]
        else:
            Exc.getExc(
                ' =====> ERROR: input filename is not defined! Please check driver argument(s)!',
                1, 1)
Example No. 14
def lookupDictKey(dic, key, *keys):
    try:
        if keys:
            return lookupDictKey(dic.get(key, {}), *keys)
        return dic.get(key)
    except BaseException:
        Exc.getExc(
            ' =====> WARNING: impossible to get dictionary value using selected keys!',
            2, 1)
        return None
Example No. 15
def getDictValue(dataDict, mapList, pflag=1):
    try:
        return functools.reduce(lambda d, k: d[k], mapList, dataDict)
    except BaseException:
        if pflag == 1:
            Exc.getExc(
                ' =====> WARNING: impossible to get dictionary value using selected keys!',
                2, 1)
        else:
            pass
        return None
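
The lookup itself is a single functools.reduce over the key path. A standalone sketch with an invented settings dictionary:

import functools

settings = {'data': {'dynamic': {'rain': {'filename': 'rain_{datetime}.nc'}}}}
key_path = ['data', 'dynamic', 'rain', 'filename']

# Walk the nested dictionary one key at a time
value = functools.reduce(lambda d, k: d[k], key_path, settings)
print(value)    # -> rain_{datetime}.nc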
Example No. 16
def checkDictKeys(a1oVarCheck={'VarDefault': False}, sVarTag='variable(s)'):

    # Check variable input type
    if isinstance(a1oVarCheck, list):
        a1oVarCheck = convertList2Dict(a1oVarCheck)
    else:
        pass

    # Count number of true and false
    iVarF = list(a1oVarCheck.values()).count(False)
    iVarT = list(a1oVarCheck.values()).count(True)
    # Length of Data
    iVarLen = a1oVarCheck.__len__()

    # Compute variable percentage
    if iVarF == 0:
        dDictPerc = 100.0
    elif iVarT == 0:
        dDictPerc = 0.0
    else:
        dDictPerc = float(iVarT) / float(iVarLen) * 100.0

    # Check variable(s) availability
    if iVarT > 0:
        iDictCheck = True
    else:
        iDictCheck = False

    # Select exit message(s) for variable(s) not defined
    if a1oVarCheck.__len__() != iVarF and iVarF > 0:
        a1oVarKeyNF = []
        for oVarKey, bVarValue in iter(a1oVarCheck.items()):
            if bVarValue is False:

                if isinstance(oVarKey, str):
                    a1oVarKeyNF.append(oVarKey)
                else:
                    a1oVarKeyNF.append(str(oVarKey))
            else:
                pass
        # Exit message if some variable(s) are not available
        a1sVarKeyNF = ', '.join(a1oVarKeyNF)
        Exc.getExc(
            ' =====> WARNING: ' + sVarTag + ' ' + a1sVarKeyNF +
            ' not defined in given dictionary!', 2, 1)
    elif a1oVarCheck.__len__() == iVarF and iVarF > 0:
        # Exit message if all variable(s) are not available
        Exc.getExc(
            ' =====> WARNING: all ' + sVarTag +
            ' not defined in given dictionary!', 2, 1)
    else:
        pass

    return iDictCheck, dDictPerc
Example No. 17
def detectVarOutlier(oVarData_ENSEMBLE):

    # Check first value of data frame of probabilistic simulation(s). All values must be the same.
    a1dVarData_ENSEMBLE_FIRST = oVarData_ENSEMBLE.values[0]
    a1dVarData_ENSEMBLE_UNIQUE, a1iVarData_ENSEMBLE_COUNT = np.unique(a1dVarData_ENSEMBLE_FIRST, return_counts=True)

    if a1dVarData_ENSEMBLE_UNIQUE.shape[0] > 1:

        a1iVarData_ENSEMBLE_COUNT_SORT = np.sort(a1iVarData_ENSEMBLE_COUNT)
        a1iVarData_ENSEMBLE_INDEX_SORT = np.argsort(a1iVarData_ENSEMBLE_COUNT)

        a1dVarData_ENSEMBLE_UNIQUE_SORT = a1dVarData_ENSEMBLE_UNIQUE[a1iVarData_ENSEMBLE_INDEX_SORT]

        a1iVarData_ENSEMBLE_COUNT_SORT = a1iVarData_ENSEMBLE_COUNT_SORT[::-1]
        a1dVarData_ENSEMBLE_UNIQUE_SORT = a1dVarData_ENSEMBLE_UNIQUE_SORT[::-1]

        a1bVarData_OUTLIER = np.full((a1dVarData_ENSEMBLE_UNIQUE.shape[0]), False, dtype=bool)
        for iVarData_ENSEMBLE_ID, (dVarData_ENSEMBLE_OUTLIER, iVarData_ENSEMBLE_COUNT) in enumerate(
                zip(a1dVarData_ENSEMBLE_UNIQUE_SORT[1:], a1iVarData_ENSEMBLE_COUNT_SORT[1:])):

            if not np.isnan(dVarData_ENSEMBLE_OUTLIER):
                iVarData_ENSEMBLE_OUTLIER = np.where(a1dVarData_ENSEMBLE_FIRST == dVarData_ENSEMBLE_OUTLIER)[0]
                sVarData_ENSEMBLE_OUTLIER = str(iVarData_ENSEMBLE_OUTLIER + 1)

                Exc.getExc(' =====> WARNING: in probabilistic simulation(s) ensemble ' + sVarData_ENSEMBLE_OUTLIER
                           + ' starts with outlier value ' + str(dVarData_ENSEMBLE_OUTLIER) + ' !', 2, 1)

                a1dVarData_OUTLIER = oVarData_ENSEMBLE.values[:, iVarData_ENSEMBLE_OUTLIER[0]]
                if a1dVarData_OUTLIER.size > 5:
                    a1iVarIdx_OUTLIER = [0, 1, 2, 3]
                else:
                    a1iVarIdx_OUTLIER = [0]

                a1sVarData_OUTLIER = ', '.join(str(dValue) for dValue in (np.take(a1dVarData_OUTLIER, a1iVarIdx_OUTLIER)))
                Exc.getExc(' =====> WARNING: ensemble with following starting values [' + a1sVarData_OUTLIER
                           + ' ... ] will be filtered! ', 2, 1)

                a1bVarData_OUTLIER[iVarData_ENSEMBLE_ID] = True
            else:
                a1bVarData_OUTLIER[iVarData_ENSEMBLE_ID] = False

        if np.any(a1bVarData_OUTLIER):
            bVarData_OUTLIER = True
        else:
            bVarData_OUTLIER = False
    else:
        bVarData_OUTLIER = False

    # Filter last values of data frame (to avoid some writing errors in closing ascii file)
    oVarData_ENSEMBLE.iloc[-1] = np.nan

    return oVarData_ENSEMBLE, bVarData_OUTLIER
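
The core check is on the first value of every ensemble member: they should all be identical, so any additional unique value is treated as a suspect member. A standalone sketch of that check with a synthetic array:

import numpy as np

first_values = np.array([0.0, 0.0, 0.0, 37.2, 0.0])      # member 4 starts with an outlier
unique_vals, counts = np.unique(first_values, return_counts=True)

if unique_vals.shape[0] > 1:
    # Sort by frequency: the most common value is taken as the regular one,
    # the remaining unique values are treated as outliers.
    order = np.argsort(counts)[::-1]
    regular, outliers = unique_vals[order][0], unique_vals[order][1:]
    print(regular, outliers)                              # -> 0.0 [37.2]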
Example No. 18
    def __checkFile(oVarData, oFileData):
        a1bFileData = []
        for sVarData, sFileData in zip(oVarData, oFileData):
            if isfile(sFileData):
                bFileData = True
            else:
                bFileData = False
                Exc.getExc(
                    ' =====> WARNING: geographical filename ' + sFileData +
                    ' is not available for defining variable ' + sVarData +
                    '. Check your settings!', 2, 1)
            a1bFileData.append(bFileData)
        return a1bFileData
Example No. 19
def readFilePickle(sFileName):

    try:
        with open(sFileName, 'rb') as oFileHandle:
            oFileData = pickle.load(oFileHandle)
    except BaseException:
        #with open(sFileName, 'rb') as oFileHandle:
        oFileData = pd.read_pickle(sFileName)
        Exc.getExc(
            ' =====> WARNING: pickle file was created using another version of pandas library!',
            2, 1)

    return oFileData
Example No. 20
    def __init__(self,
                 sFileName_IN,
                 sZipMode='',
                 sFileName_OUT=None,
                 sZipType=None):

        # Global variable(s)
        self.sFileName_IN = sFileName_IN
        self.sFileName_OUT = sFileName_OUT
        self.sZipMode = sZipMode
        self.sZipType = sZipType

        # Define filename IN
        self.__setFileNameIN()
        # Define filename OUT
        self.__setFileNameOUT()

        # Select zip library and methods
        if self.sFileName_IN.endswith('gz') or self.sZipType == 'gz':
            sZipType = 'GZip'
            self.oFileWorkspace = GZip(
                join(self.sFilePath_IN, self.sFileName_IN),
                join(self.sFilePath_OUT, self.sFileName_OUT), self.sZipMode,
                self.sZipType)
        elif self.sFileName_IN.endswith('7z') or self.sZipType == '7z':
            sZipType = '7Zip'
            pass
        elif self.sFileName_IN.endswith('bz2') or self.sZipType == 'bz2':
            sZipType = 'BZ2Zip'
            self.oFileWorkspace = BZ2(
                join(self.sFilePath_IN, self.sFileName_IN),
                join(self.sFilePath_OUT, self.sFileName_OUT), self.sZipMode,
                self.sZipType)

        elif self.sZipType == 'NoZip' or not self.sZipType:
            sZipType = 'NoZip'
            self.oFileWorkspace = NoZip(
                join(self.sFilePath_IN, self.sFileName_IN),
                join(self.sFilePath_OUT, self.sFileName_OUT), self.sZipMode,
                self.sZipType)

        else:

            if sZipMode == 'z':
                Exc.getExc(
                    ' =====> ERROR: zip or unzip functions are not selected! Please check zip tag!',
                    1, 1)
            elif sZipMode == 'u':
                Exc.getExc(
                    ' =====> WARNING: zip or unzip functions are not selected! Please check zip tag!',
                    2, 1)
Example No. 21
def removeDictKey(d, keys):

    if isinstance(keys, list):
        r = dict(d)
        for key in keys:
            if key in d:
                del r[key]
            else:
                pass
        return r
    else:
        Exc.getExc(' =====> WARNING: keys values must be included in a list!',
                   2, 1)
        return d
Example No. 22
def createDArray1D(oVarDArray,
                   oVarPeriod,
                   sVarName_IN='rain',
                   sVarName_OUT=None,
                   oVarCoords=None,
                   oVarDims=None):

    # Initialize coord(s) and dim(s)
    if oVarCoords is None:
        oVarCoords = ['time']
    if oVarDims is None:
        oVarDims = ['time']
    if sVarName_OUT is None:
        sVarName_OUT = sVarName_IN

    # Define data period
    oVarPeriod_DATA = pd.DatetimeIndex(oVarDArray.time.to_pandas().values)
    oVarPeriod_SEL = oVarPeriod.intersection(oVarPeriod_DATA)

    # Get data, attribute(s) and encoding(s) for selected data array
    oVarDArray_SEL = oVarDArray.loc[dict(time=oVarPeriod_SEL)]
    oAttributeDArray_SEL = oVarDArray_SEL.attrs
    try:
        oEncodingDArray_SEL = {
            '_FillValue': float(oVarDArray_SEL.encoding['_FillValue']),
            'scale_factor': int(oVarDArray_SEL.encoding['scale_factor'])
        }
    except BaseException:
        Exc.getExc(
            ' =====> WARNING: in creating data array 1D, _FillValue and scale_factor are not defined!'
            ' Trying to correct with default values (_FillValue=-9999.0; scale_factor=1)',
            2, 1)
        oEncodingDArray_SEL = {'_FillValue': -9999.0, 'scale_factor': 1}

    # Initialize empty data array
    a1dVarArray_EMPTY = np.zeros([oVarPeriod.__len__()])
    a1dVarArray_EMPTY[:] = np.nan
    oVarDArray_EMPTY = xr.DataArray(
        a1dVarArray_EMPTY,
        name=sVarName_IN,
        attrs=oAttributeDArray_SEL,  # encoding=oEncodingDArray_SEL,
        dims=oVarDims[0],
        coords={oVarCoords[0]: (oVarDims[0], oVarPeriod)})
    # Combine empty data array with selected data array
    oVarDSet_FILLED = oVarDArray_EMPTY.combine_first(oVarDArray_SEL)

    if sVarName_IN != sVarName_OUT:
        oVarDSet_FILLED.name = sVarName_OUT

    return oVarDSet_FILLED
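
The filling strategy is to build an all-NaN array over the full period and combine it with the available data, so missing timesteps stay NaN. A minimal sketch of the same step with synthetic values (xarray and pandas assumed available):

import numpy as np
import pandas as pd
import xarray as xr

full_period = pd.date_range('2021-03-15 00:00', periods=6, freq='h')
data_period = full_period[2:5]                                 # only 3 timesteps available

data_partial = xr.DataArray([1.0, 2.0, 3.0], name='rain',
                            dims='time', coords={'time': data_period})
data_empty = xr.DataArray(np.full(len(full_period), np.nan), name='rain',
                          dims='time', coords={'time': full_period})

# NaN values of the empty array are filled where the partial data is defined
data_filled = data_empty.combine_first(data_partial)
print(data_filled.values)      # -> [nan nan  1.  2.  3. nan]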
Example No. 23
    def checkDataTags(oVarTags_REF, oVarTags_DEF):

        for sTagKey in list(oVarTags_DEF.keys()):
            sVarTag_DEF = list(oVarTags_DEF[sTagKey].keys())[0]

            if sVarTag_DEF not in list(oVarTags_REF.keys()):
                Exc.getExc(' =====> WARNING: valid key [' + sVarTag_DEF + '] is not defined in algorithm key', 2, 1)
            else:
                oVarTags_REF.pop(sVarTag_DEF)

        if oVarTags_REF.__len__() > 1:
            oListTags_UNDEF = list(oVarTags_REF.keys())
            a1sListTags_UNDEF = ','.join(oListTags_UNDEF)
            Exc.getExc(' =====> WARNING: key(s) ' + a1sListTags_UNDEF + ' are undefined! Check your settings!', 2, 1)
Example No. 24
def findVarTag(oVarList_FILTERED, sVarPattern='rain_$ensemble', oVarTags=None):

    if oVarTags is None:
        oVarTags = ['$period', '$ensemble']

    iVarTags_N = oVarTags.__len__()
    iVarPattern_N = sVarPattern.count('$')

    if iVarPattern_N > iVarTags_N:
        Exc.getExc(
            ' =====> WARNING: in finding tag(s) value(s), the number of $ patterns'
            ' is greater than the number of defined tags!', 2, 1)

    oVarID = []
    oVarTags_FILTERED = deepcopy(oVarTags)
    sVarEnsemble = deepcopy(sVarPattern)
    for sVarTag in oVarTags:
        if sVarTag in sVarEnsemble:
            oVarID.append(sVarEnsemble.index(sVarTag))
        else:
            oVarTags_FILTERED.remove(sVarTag)
    if oVarID:
        a1oVarTags_SORTED = sorted(zip(oVarID, oVarTags_FILTERED))
    else:
        a1oVarTags_SORTED = None

    if a1oVarTags_SORTED is not None:
        oTagsName_FILTERED = []
        for iVarID_SORTED, oVarTags_SORTED in enumerate(a1oVarTags_SORTED):
            if '$ensemble' in oVarTags_SORTED:
                iVarID_ENSEMBLE = iVarID_SORTED
            oTagsName_FILTERED.append(oVarTags_SORTED[1])
    else:
        oTagsName_FILTERED = None

    oTagsValue_FILTERED = []
    for sVarList_FILTERED in oVarList_FILTERED:
        oTagValue_FILTERED = re.findall(r'\d+', sVarList_FILTERED)
        if oTagValue_FILTERED:
            oTagsValue_FILTERED.append(oTagValue_FILTERED)

    if not oTagsValue_FILTERED:
        oTagsValue_FILTERED = None

    if oTagsName_FILTERED is not None:
        oTagsName_FILTERED = list(
            repeat(oTagsName_FILTERED, oTagsValue_FILTERED.__len__()))

    return oTagsName_FILTERED, oTagsValue_FILTERED
Example No. 25
def prepareDictKey(ob_keys, sep_keys=''):
    try:
        if isinstance(ob_keys, str):
            if sep_keys:
                dict_keys = ob_keys.split(sep_keys)
            else:
                dict_keys = [ob_keys]
            return dict_keys
        elif isinstance(ob_keys, list):
            dict_keys = ob_keys
            return dict_keys
        else:
            Exc.getExc(' =====> ERROR: keys format unknown!', 1, 1)
    except BaseException:
        return None
Example No. 26
def addVarAttrs(oVarObj, oVarAttrs):
    for sAttrKey, oAttrValue in oVarAttrs.items():

        if isinstance(oVarObj, xr.DataArray):
            oVarObj.attrs[sAttrKey] = oAttrValue
        else:
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=UserWarning)
                setattr(oVarObj, sAttrKey, oAttrValue)
                try:
                    oVarObj._metadata.append(sAttrKey)
                except BaseException:
                    Exc.getExc(
                        ' =====> ERROR: add attribute(s) to dataframe obj in _metadata list FAILED! ',
                        1, 1)
    return oVarObj
Example No. 27
def createStats1D(oVarStats, oVarData, sVarNameGroup):

    if oVarStats is None:
        oVarStats = {}

    if sVarNameGroup not in list(oVarStats.keys()):
        oVarStats[sVarNameGroup] = {}
        oVarStats[sVarNameGroup]['min'] = []
        oVarStats[sVarNameGroup]['max'] = []
        oVarStats[sVarNameGroup]['average'] = []

    if np.isnan(oVarData.values).all():
        dVarMax = np.nan
        dVarMin = np.nan
        dVarAvg = np.nan
        Exc.getExc(
            ' =====> WARNING: in calculating 1D stats for type ' +
            oVarData.name + ' all values in time series are null!', 2, 1)
    else:
        dVarMax = np.nanmax(oVarData.values)
        dVarMin = np.nanmin(oVarData.values)
        dVarAvg = np.nanmean(oVarData.values)

    if isinstance(dVarMax, np.float64):
        dVarMax = dVarMax.astype(np.float32)
    if isinstance(dVarMin, np.float64):
        dVarMin = dVarMin.astype(np.float32)
    if isinstance(dVarAvg, np.float64):
        dVarAvg = dVarAvg.astype(np.float32)

    if not oVarStats[sVarNameGroup]['min']:
        oVarStats[sVarNameGroup]['min'] = [dVarMin]
    else:
        oVarStats[sVarNameGroup]['min'].append(dVarMin)

    if not oVarStats[sVarNameGroup]['max']:
        oVarStats[sVarNameGroup]['max'] = [dVarMax]
    else:
        oVarStats[sVarNameGroup]['max'].append(dVarMax)

    if not oVarStats[sVarNameGroup]['average']:
        oVarStats[sVarNameGroup]['average'] = [dVarAvg]
    else:
        oVarStats[sVarNameGroup]['average'].append(dVarAvg)

    return oVarStats
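
The all-NaN guard matters because np.nanmin, np.nanmax and np.nanmean emit a RuntimeWarning and return NaN when the whole series is NaN. A standalone sketch of the guarded statistics with a synthetic series:

import numpy as np

series = np.array([np.nan, 2.0, 5.0, np.nan])

if np.isnan(series).all():
    v_min = v_max = v_avg = np.nan     # skip the computation and warn, as createStats1D does
else:
    v_min, v_max, v_avg = np.nanmin(series), np.nanmax(series), np.nanmean(series)

print(v_min, v_max, v_avg)             # -> 2.0 5.0 3.5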
Example No. 28
def readFileNC4(sVarFileName, oVarGroup=None, oVarEngine='h5netcdf'):

    if oVarGroup is not None:
        if isinstance(oVarGroup, str):
            oVarGroup = [oVarGroup]
        oVarData = {}
        for sVarGroup in oVarGroup:

            try:
                with xr.open_dataset(sVarFileName,
                                     group=sVarGroup,
                                     engine=oVarEngine) as oDSet:
                    oVarData[sVarGroup] = oDSet.load()
                    oDSet.close()
            except BaseException:

                if os.path.exists(sVarFileName):
                    with netCDF4.Dataset(sVarFileName) as oDSet:
                        if hasattr(oDSet, 'groups'):
                            oFileGroups = list(getattr(oDSet, 'groups'))
                        else:
                            oFileGroups = None
                else:
                    oFileGroups = None

                if oFileGroups is None:
                    Exc.getExc(
                        ' =====> WARNING: in reading netcdf file ' +
                        sVarFileName + ' error(s) occurred!', 2, 1)
                else:
                    if sVarGroup not in oFileGroups:
                        Exc.getExc(
                            ' =====> WARNING: in reading netcdf file ' +
                            sVarFileName + ' group ' + sVarGroup +
                            ' is not available in the file!', 2, 1)
                    else:
                        Exc.getExc(
                            ' =====> WARNING: in reading netcdf file ' +
                            sVarFileName + ' group ' + sVarGroup +
                            ' is available in the file, but error(s) occurred in handling data!',
                            2, 1)

                oVarData[sVarGroup] = None
    else:
        try:
            with xr.open_dataset(sVarFileName, engine=oVarEngine) as oDSet:
                oVarData = oDSet.load()
                oDSet.close()
        except BaseException:
            Exc.getExc(
                ' =====> WARNING: in reading netcdf file ' + sVarFileName +
                ' error(s) occurred!', 2, 1)
            oVarData = None

    return oVarData
Example No. 29
def openZip(sFileName_IN, sFileName_OUT, sZipMode):
    # Check method
    try:

        # Open file
        if sZipMode == 'z':  # zip mode
            oFile_IN = open(sFileName_IN, 'rb')
            oFile_OUT = gzip.open(sFileName_OUT, 'wb')
        elif sZipMode == 'u':  # unzip mode
            oFile_IN = gzip.GzipFile(sFileName_IN, "rb")
            oFile_OUT = open(sFileName_OUT, "wb")

        # Pass file handle(s)
        return oFile_IN, oFile_OUT

    except IOError as oError:
        Exc.getExc(
            ' =====> ERROR: in open file (GZip Zip)' + ' [' + str(oError) +
            ']', 1, 1)
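
A self-contained round trip of the same gzip pattern (compress as in 'z' mode, decompress as in 'u' mode), written with context managers and a temporary directory so it runs anywhere:

import gzip
import os
import shutil
import tempfile

work_dir = tempfile.mkdtemp()
plain_path = os.path.join(work_dir, 'data.txt')
zip_path = plain_path + '.gz'

with open(plain_path, 'w') as handle:
    handle.write('1 2 3\n')

# 'z' mode: plain file -> gzip file
with open(plain_path, 'rb') as file_in, gzip.open(zip_path, 'wb') as file_out:
    shutil.copyfileobj(file_in, file_out)

# 'u' mode: gzip file -> plain text
with gzip.open(zip_path, 'rb') as file_in:
    print(file_in.read().decode())     # -> 1 2 3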
Example No. 30
def createTemp(sPathTemp=None, iMethodTemp=1):

    # -------------------------------------------------------------------------------------
    # Check for undefined temporary folder string
    if sPathTemp is None:
        iMethodTemp = 2
    # -------------------------------------------------------------------------------------

    # -------------------------------------------------------------------------------------
    # Define temporary folder method
    if iMethodTemp == 1:

        # -------------------------------------------------------------------------------------
        # Create temporary folder to copy file from source (to manage multiprocess request)
        sRN1 = str(randint(0, 1000))
        sRN2 = str(randint(1001, 5000))
        oTimeTemp = datetime.datetime.now()
        sTimeTemp = oTimeTemp.strftime('%Y%m%d-%H%M%S_%f')
        sFolderTemp = sTimeTemp.lower() + '_' + sRN1.lower(
        ) + '_' + sRN2.lower()
        # -------------------------------------------------------------------------------------
    elif iMethodTemp == 2:
        # -------------------------------------------------------------------------------------
        # Create temporary folder in a system temp folder
        sFolderTemp = tempfile.mkdtemp()
        # -------------------------------------------------------------------------------------
    else:
        # -------------------------------------------------------------------------------------
        # Exit with warning (method unknown)
        Exc.getExc(
            ' =====> WARNING: invalid choice for temporary folder method!', 2,
            1)
        sFolderTemp = None
        # -------------------------------------------------------------------------------------

    # -------------------------------------------------------------------------------------
    # Return temporary folder
    return sFolderTemp