# --- Ejemplo n.º 1 (Example 1) ---
    def Dates_ampm224h(self, Dates, Hours=None, Date_Format=None):
        '''
        DESCRIPTION:

            Converts dates from AM/PM to 24 hour dates.
        _______________________________________________________________________

        INPUT:
            + Dates: Vector with dates in string format. If Hours is None
                     the AM/PM hour must already be included here.
            + Hours: Vector with hours in string format, defaulted to None
                     if the Dates vector already carries the hours.
            + Date_Format: Optional datetime string format. When None,
                           every format in self.DateTimeAMPM_Formats is
                           tried in order.
        _______________________________________________________________________

        OUTPUT:

            - DatesP: Complete Python datetime vector in 24 hour format,
                      or the value returned by utl.ShowError when no
                      format matches (kept for backward compatibility).
        '''
        # Normalize trailing 'a'/'p' markers to 'am'/'pm' so strptime's %p
        # directive can parse them, then join date and hour strings.
        if Hours is not None:
            if Hours[0][-1] in ('a', 'p'):
                FullHours = [H + 'm' for H in Hours]
            else:
                FullHours = Hours
            DatesC = np.array(
                [D + ' ' + FullHours[iD] for iD, D in enumerate(Dates)])
        else:
            if Dates[0][-1] in ('a', 'p'):
                DatesC = np.array([D + 'm' for D in Dates])
            else:
                DatesC = np.array(Dates)
        # -------------------------
        # Date_Format Verification
        # -------------------------
        if Date_Format is None:
            Date_Formats = self.DateTimeAMPM_Formats
        else:
            Date_Formats = [Date_Format]
        # -------------------------
        # Transformation
        # -------------------------
        # Try each candidate format; the first one that parses every entry
        # wins.  strptime raises ValueError on a mismatch (TypeError on a
        # non-string entry) -- the previous bare except hid real errors.
        for iF, F in enumerate(Date_Formats):
            try:
                DatesP = np.array([datetime.strptime(i, F) for i in DatesC])
                break
            except (ValueError, TypeError):
                DatesP = None
                if iF == len(Date_Formats) - 1:
                    # No format matched: report and return the error object.
                    Er = utl.ShowError('Dates_ampm224h', 'DatesUtil',
                                       'Bad date format, change format')
                    return Er

        return DatesP
# --- Ejemplo n.º 2 (Example 2) ---
    def __init__(self, PathImg='', Fol='201303060940', V=None):
        '''
        DESCRIPTION:
            Locates the folder named Fol inside the Maps/, Series/ and
            Radar/ subdirectories of PathImg and stores the lists of .png
            images found there for the variables in V.
        _____________________________________________________________
        INPUT:
            :param PathImg: A str, base directory of the images.
            :param Fol: A str, name of the event folder to load.
            :param V: A list of two str, variable subfolder names.
                      Defaulted to ['Prec', 'Pres'].
        '''
        # Bug fix: V used a mutable default argument; default now built
        # inside the call (behaviour unchanged for callers).
        if V is None:
            V = ['Prec', 'Pres']
        MapFold = np.array(utl.GetFolders(PathImg + 'Maps/'))
        SeriesFold = np.array(utl.GetFolders(PathImg + 'Series/'))
        RadarFold = np.array(utl.GetFolders(PathImg + 'Radar/'))

        xFolMap = np.where(MapFold == Fol)[0]
        xFolSer = np.where(SeriesFold == Fol)[0]
        xFolRad = np.where(RadarFold == Fol)[0]

        # The Series folder is mandatory; abort when it is missing.
        if len(xFolSer) == 0:
            # NOTE(review): assumes utl.ShowError returns an exception
            # instance suitable for `raise` -- confirm.
            E = utl.ShowError(
                '__init__', 'MapSeriesGen',
                'No se encuentra la carpeta en los dos directorios')
            raise E

        # Map images are optional.
        if len(xFolMap) != 0:
            self.PathMaps = PathImg + 'Maps/' + MapFold[xFolMap[0]] + '/'
            self.ArchMap = gl.glob(self.PathMaps + '*.png')
            self.Names = [i[len(self.PathMaps):-4] for i in self.ArchMap]

        self.PathSeries = PathImg + 'Series/' + SeriesFold[
            xFolSer[0]] + '/' + V[1] + '/'
        self.ArchSeries = gl.glob(self.PathSeries + '*.png')
        self.NamesSeries = [
            i[len(self.PathSeries):-4] for i in self.ArchSeries
        ]

        # Radar images are optional as well.
        if len(xFolRad) != 0:
            # Files are loaded considering the radar variable 'DBZH'.
            # NOTE(review): the Series folder index (xFolSer) is reused to
            # build the radar paths instead of xFolRad -- confirm intent.
            self.PathRadar1 = PathImg + 'Radar/' + SeriesFold[
                xFolSer[0]] + '/' + V[0] + '/DBZH/'
            self.PathRadar2 = PathImg + 'Radar/' + SeriesFold[
                xFolSer[0]] + '/' + V[1] + '/DBZH/'
            self.ArchRadar1 = gl.glob(self.PathRadar1 + '*.png')
            self.ArchRadar2 = gl.glob(self.PathRadar2 + '*.png')
            self.NamesRadar1 = [
                i[len(self.PathRadar1):-4] for i in self.ArchRadar1
            ]
            self.NamesRadar2 = [
                i[len(self.PathRadar2):-4] for i in self.ArchRadar2
            ]

        return
# --- Ejemplo n.º 3 (Example 3) ---
    def Dates_datetime2str(self, DatesP, Date_Format=None):
        '''
        DESCRIPTION:

            This function takes a Python date or datetime vector and
            returns a string data vector.
        _______________________________________________________________________

        INPUT:
            + DatesP: Python date or datetime vector.
            + Date_Format: Format of the dates given, it must be given in
                           datetime string format like %Y/%m/%d %H%M.
                           When None the default format lists are used.
        _______________________________________________________________________

        OUTPUT:

            - Dates: Dates string vector.
        '''
        # ----------------
        # Error managment
        # ----------------
        # datetime is a subclass of date, so one isinstance call covers
        # both accepted input types.
        if not isinstance(DatesP[0], (date, datetime)):
            utl.ShowError(
                'Dates_datetime2str', 'DatesUtil',
                'Bad DatesP format given, not in date or datetime format')
            raise TypeError
        # -------------------------
        # Date_Format Verification
        # -------------------------
        if Date_Format is None:
            if isinstance(DatesP[0], datetime):
                Date_Formats = self.DateTime_Formats
            else:
                Date_Formats = self.Date_Formats
        else:
            Date_Formats = [Date_Format]
        # -------------------------
        # Changing Dates
        # -------------------------
        # NOTE: only the first candidate format is applied, as before.
        Dates = np.array([i.strftime(Date_Formats[0]) for i in DatesP])
        return Dates
# --- Ejemplo n.º 4 (Example 4) ---
    def AddElim(self, Elim):
        '''
        DESCRIPTION:
            Registers elimination thresholds on the instance: values in
            Elim['ElimOver'] update self.ElimOver and values in
            Elim['ElimLow'] update self.ElimLow.
        _____________________________________________________________
        INPUT:
            :param Elim: A dict, dictionary with the values that would
                                 be eliminated over and lower.
                                 Ex: Elim = {'ElimOver': {'RSC': 3000},
                                             'ElimLow': {'TC': -1}}
        '''
        # A missing key (KeyError) or a non-subscriptable Elim (TypeError)
        # simply disables that half of the update.  The two duplicated
        # except clauses per lookup are merged into one.
        FlagKeyOver = True
        FlagKeyLow = True
        try:
            ElimLabOver = list(Elim['ElimOver'])
        except (KeyError, TypeError):
            FlagKeyOver = False

        try:
            ElimLabLow = list(Elim['ElimLow'])
        except (KeyError, TypeError):
            FlagKeyLow = False

        if not FlagKeyOver and not FlagKeyLow:
            # Neither section was usable: report and raise.
            utl.ShowError(
                'AddElim', 'OpenWundergrounds',
                'No elimination was added, review the Elim parameter')
            raise KeyError

        if FlagKeyOver:
            for Lab in ElimLabOver:
                self.ElimOver[Lab] = Elim['ElimOver'][Lab]

        if FlagKeyLow:
            for Lab in ElimLabLow:
                self.ElimLow[Lab] = Elim['ElimLow'][Lab]
# --- Ejemplo n.º 5 (Example 5) ---
def EDIDEAM(File=None):
    '''
    DESCRIPTION:

        With this function the information of an IDEAM type file can 
        be extracted.
    _______________________________________________________________________

    INPUT:
        + File: File that would be extracted including the path.
                Default value is None because it works with Open_Data.
    _______________________________________________________________________
    
    OUTPUT:
        - DateP_Dict: Dict of date vectors keyed by
          '<station code>_<variable>_<statistic>'.
        - Value_Dict: Dict of data vectors with the same keys.
        - Flags_Dict: Dict of flag vectors with the same keys.
        - Flag_Meaning: List with the textual meaning of each flag found.
        - Stations_Information: Dict with CODE, NAME, LATITUDE, LONGITUDE
          and ELEVATION entries for every station.
    '''
    if File == None:
        Er = utl.ShowError('EDIDEAM', 'EDSM', 'No file was added')
        return None, None, None, None, None
    # Match Variables: regexes locating the header fields and variable
    # sections of the fixed-layout IDEAM report (Spanish keywords).
    Station_Compile = re.compile('ESTACION')
    Lat_Compile = re.compile('LATITUD')
    Lon_Compile = re.compile('LONGITUD')
    Elv_Compile = re.compile('ELEVACION')
    Year_Compile = re.compile(' ANO ')
    Var_Temp_Compile = re.compile('TEMPERATURA')
    Var_Prec_Compile = re.compile('PRECIPITACION')
    Var_BS_Compile = re.compile('BRILLO SOLAR')
    Med_Temp_Compile = re.compile('VALORES MEDIOS')
    Min_Temp_Compile = re.compile('VALORES MINIMOS')
    Max_Temp_Compile = re.compile('VALORES MAXIMOS')

    # 'ENERO' (January) marks the header row of each monthly data table.
    Start_Compile = re.compile('ENERO')

    # Open the file.  Sum1/Sum2 are row offsets into the data tables that
    # depend on which open() path succeeded (and on Windows).
    # NOTE(review): the TypeError fallback appears to target Python 2's
    # open(), which lacks the encoding argument -- confirm.
    # NOTE(review): the file handle is never closed.
    try:
        f = open(File, 'r', encoding='utf-8')
        Lines = np.array(f.readlines())
        Sum1 = 6
        Sum2 = 2
    except TypeError:
        f = open(File, 'r')
        Lines1 = f.readlines()
        Lines = np.array([i.encode('UTF-8') for i in Lines1])
        Sum1 = 3
        Sum2 = 1

    if platform.system() == 'Windows':
        Sum1 = 3
        Sum2 = 1

    # Station header fields
    Station_Match = []
    Row_Station = []
    Station_Code = []
    Station_Name = []
    Lat_Match = []
    Lats = []
    Lon_Match = []
    Lons = []
    Elv_Match = []
    Elvs = []
    # Year fields
    Year_Match = []
    Row_Year = []
    Years = []
    # Stations with temperature
    Var_Temp_Match = []
    Row_Var_Temp = []
    # Stations with precipitation
    Var_Prec_Match = []
    Row_Var_Prec = []
    # Stations with sunshine ('brillo solar')
    Var_BS_Match = []
    Row_Var_BS = []
    # Mean values
    Med_Temp_Match = []
    Row_Med_Temp = []
    # Start of data
    Start_Match = []
    Row_Start = []

    keys = []
    Stations_Code = []
    Stations_Name = []
    Value_Dict = dict()
    DateP_Dict = dict()
    Flags_Dict = dict()
    x = 0
    for irow, row in enumerate(Lines):
        # Stations: code and name are read at fixed character offsets
        # after the 'ESTACION' keyword.
        if re.search(Station_Compile, row) != None:
            Row_Station.append(irow)
            Station_Match.append(re.search(Station_Compile, row))
            Station_Code.append(row[Station_Match[-1].end() +
                                    3:Station_Match[-1].end() + 3 + 8])
            Station_Name.append(row[Station_Match[-1].end() + 3 + 8 + 1:-1])
        # Latitude
        if re.search(Lat_Compile, row) != None:
            Lat_Match.append(re.search(Lat_Compile, row))
            Lats.append(row[Lat_Match[-1].end() + 4:Lat_Match[-1].end() + 4 +
                            6])
        # Longitude
        if re.search(Lon_Compile, row) != None:
            Lon_Match.append(re.search(Lon_Compile, row))
            Lons.append(row[Lon_Match[-1].end() + 3:Lon_Match[-1].end() + 3 +
                            6])
        # Elevation
        if re.search(Elv_Compile, row) != None:
            Elv_Match.append(re.search(Elv_Compile, row))
            Elvs.append(row[Elv_Match[-1].end() + 2:Elv_Match[-1].end() + 2 +
                            4])
        # Years
        if re.search(Year_Compile, row) != None:
            Row_Year.append(irow)
            Year_Match.append(re.search(Year_Compile, row))
            Years.append(row[Year_Match[-1].end() + 1:Year_Match[-1].end() +
                             1 + 4])
        # Temperature: also record which statistic (mean/min/max) the
        # upcoming table holds.
        if re.search(Var_Temp_Compile, row) != None:
            Row_Var_Temp.append(irow)
            Var_Temp_Match.append('TEMPERATURA')
            if re.search(Med_Temp_Compile, row) != None:
                Row_Med_Temp.append(irow)
                Med_Temp_Match.append('MEDIO')
            elif re.search(Min_Temp_Compile, row) != None:
                Med_Temp_Match.append('MINIMO')
            elif re.search(Max_Temp_Compile, row) != None:
                Med_Temp_Match.append('MAXIMO')
            else:
                Med_Temp_Match.append(0)
        # Precipitation
        if re.search(Var_Prec_Compile, row) != None:
            Row_Var_Prec.append(irow)
            Var_Temp_Match.append('PRECIPITACION')
            Med_Temp_Match.append('TOTALES')

        # Sunshine ('Brillo Solar')
        if re.search(Var_BS_Compile, row) != None:
            Row_Var_BS.append(irow)
            Var_Temp_Match.append('BRILLO SOLAR')
            Med_Temp_Match.append('TOTALES')
        # Data: 'ENERO' marks a table header; its data starts Sum1 rows
        # below, in the column where 'ENERO' begins.
        if re.search(Start_Compile, row) != None:
            Row_Start.append(irow)
            Start_Match.append(re.search(Start_Compile, row))

            xRow_St = Row_Start[-1] + Sum1
            xCol_St = Start_Match[-1].start()
            if Row_Start[-1] <= 20:
                # First table of the file: start fresh accumulators.
                DateP = []
                Value = []
                Flags = []
                DateP, Value, Flags = LoopDataDaily_IDEAM(
                    DateP, Value, Flags, xRow_St, xCol_St, Years[-1], Lines,
                    Sum2)
            elif (Station_Code[-1] == Station_Code[-2]) and (
                    Med_Temp_Match[-1]
                    == Med_Temp_Match[-2]) and (Var_Temp_Match[-1]
                                                == Var_Temp_Match[-2]):
                # Same station/variable/statistic as the previous table:
                # keep appending to the current accumulators.
                DateP, Value, Flags = LoopDataDaily_IDEAM(
                    DateP, Value, Flags, xRow_St, xCol_St, Years[-1], Lines,
                    Sum2)
            else:
                # The series changed: store the finished series under its
                # '<code>_<variable>_<statistic>' key, then restart the
                # accumulators for the new series.
                keys.append(Station_Code[-2] + '_' + Var_Temp_Match[-2] + '_' +
                            Med_Temp_Match[-2])
                Stations_Name.append(Station_Name[-2])
                Stations_Code.append(Station_Code[-2])
                DateP_Dict[Station_Code[-2]+'_'+Var_Temp_Match[-2]+'_'+Med_Temp_Match[-2]]=\
                    DateP
                Value_Dict[Station_Code[-2]+'_'+Var_Temp_Match[-2]+'_'+Med_Temp_Match[-2]]=\
                    Value
                Flags_Dict[Station_Code[-2]+'_'+Var_Temp_Match[-2]+'_'+Med_Temp_Match[-2]]=\
                    Flags
                DateP = []
                Value = []
                Flags = []
                DateP, Value, Flags = LoopDataDaily_IDEAM(
                    DateP, Value, Flags, xRow_St, xCol_St, Years[-1], Lines,
                    Sum2)

        # Last line of the file: flush the series still being accumulated.
        if irow == len(Lines) - 1:
            keys.append(Station_Code[-1] + '_' + Var_Temp_Match[-1] + '_' +
                        Med_Temp_Match[-1])
            DateP_Dict[Station_Code[-1]+'_'+Var_Temp_Match[-1]+'_'+Med_Temp_Match[-1]]=\
                DateP
            Value_Dict[Station_Code[-1]+'_'+Var_Temp_Match[-1]+'_'+Med_Temp_Match[-1]]=\
                Value
            Flags_Dict[Station_Code[-1]+'_'+Var_Temp_Match[-1]+'_'+Med_Temp_Match[-1]]=\
                Flags

    # Station information: deduplicate by station code (first occurrence).
    Stations_Code_Un, xU = np.unique(np.array(Station_Code), return_index=True)
    Stations_Name_Un = np.array(Station_Name)[xU]
    Lat_Un = np.array(Lats)[xU]
    Lon_Un = np.array(Lons)[xU]
    Elv_Un = np.array(Elvs)[xU]

    Stations_Information = dict()
    Stations_Information['CODE'] = Stations_Code_Un
    Stations_Information['NAME'] = Stations_Name_Un
    # A decimal point is inserted after the first two characters of the
    # raw coordinate strings.
    Stations_Information['LATITUDE'] = [i[:2] + '.' + i[2:] for i in Lat_Un]
    Stations_Information['LONGITUDE'] = [i[:2] + '.' + i[2:] for i in Lon_Un]
    Stations_Information['ELEVATION'] = Elv_Un

    # Search for the Flags meaning
    Flag_Meaning_Dict = dict()

    keys = list(Flags_Dict)

    # Collect the unique flag values used across all series.
    for ikey, key in enumerate(keys):
        if ikey == 0:
            Flag_Un = np.unique(np.array(Flags_Dict[key]))
        else:
            Flag_Un = np.hstack(
                (Flag_Un, np.unique(np.array(Flags_Dict[key]))))

    Flag_Un2 = np.unique(Flag_Un)

    # Removing the nan values
    x = np.where(Flag_Un2 == 'nan')
    Flag_Un2 = np.delete(Flag_Un2, x)

    # Verify the flags meaning: look up each flag's legend line
    # ('<flag> : ...') in the file body; the slice end depends on the
    # line-ending length of the open() path taken above.
    Flag_Meaning = []
    for flag in Flag_Un2:
        Flag_Compile = re.compile(flag + ' :')

        for irow, row in enumerate(Lines):
            if re.search(Flag_Compile, row) != None:
                Flag_Match = re.search(Flag_Compile, row)
                if Sum1 == 6:
                    Flag_Meaning.append(row[Flag_Match.start():-1])
                else:
                    Flag_Meaning.append(row[Flag_Match.start():-3])
                break

    return DateP_Dict, Value_Dict, Flags_Dict, Flag_Meaning, Stations_Information
# --- Ejemplo n.º 6 (Example 6) ---
def EDExcel(File=None,
            sheet=None,
            colDates=(0, ),
            colData=(1, ),
            row_skip=1,
            flagHeader=True,
            row_end=None,
            num_NaN=None):
    '''
    DESCRIPTION:

        This function extracts time series data from a sheet in an Excel
        file.
    _______________________________________________________________________

    INPUT:
        + File: File that needs to be open.
        + sheet: Index (number) or name (string) of the sheet.
        + colDates: tuple sequence with the columns where the dates or
                    string data is. Defaulted to (0,).
        + colData: tuple sequence with the columns where the data is.
                   Defaulted to (1,).
        + row_skip: begining row of the data. Defaulted to 1.
        + flagHeader: flag to get the header of the information (the row
                      just above the data).
        + row_end: Ending row if nedded, defaulted to None (last row).
        + num_NaN: numeric value to be replaced by NaN, defaulted to None.
    _______________________________________________________________________
    
    OUTPUT:
        - Dates: dict of date vectors keyed by source column index.
        - Data: dict of float vectors keyed by source column index.
        - Header: list with the headers of the extracted columns, or None
                  when flagHeader is False.
        Returns (None, None, None) when input validation fails.
    '''
    # ----------------
    # Error Managment
    # ----------------
    # Verify values
    if File is None:
        utl.ShowError('EDExcel', 'EDSM', 'No file was added')
        return None, None, None
    if sheet is None:
        utl.ShowError('EDExcel', 'EDSM', 'No sheet was added')
        return None, None, None
    if num_NaN is None:
        flagnumNaN = False
    else:
        flagnumNaN = True
        if isinstance(num_NaN, str):
            utl.ShowError('EDExcel', 'EDSM', 'num_NaN must be a number')
            return None, None, None
        num_NaN = float(num_NaN)
    # ----------------
    # Data extraction
    # ----------------
    # Open the workbook and pick the sheet by name (str) or index (int).
    B = xlrd.open_workbook(File)
    if isinstance(sheet, str):
        S = B.sheet_by_name(sheet)
    else:
        S = B.sheet_by_index(sheet)

    # Verify the number of columns and rows; clamp the ending row.
    ncol = S.ncols
    nrow = S.nrows
    if max(colDates) > ncol - 1 or max(colData) > ncol - 1:
        # NOTE(review): the original code only reports this error and
        # keeps going; behaviour preserved.
        utl.ShowError('EDExcel', 'EDSM',
                      'column exceed dimension of the sheet')
    if row_end is not None:
        if row_end > nrow - 1:
            row_end = nrow - 1
    else:
        row_end = nrow - 1

    # Header Exctraction (row above the data).
    if flagHeader:
        Header = S.row_values(row_skip - 1)
        Header = list(np.array(Header)[list(colDates)]) + list(
            np.array(Header)[list(colData)])
    else:
        # Bug fix: Header was built unconditionally before, raising
        # NameError whenever flagHeader was False.
        Header = None
    # Extracting time (or raw cell values when the column is not a date).
    Dates = dict()
    Data = dict()
    for CDate in colDates:
        Dates1 = S.col_values(CDate, start_rowx=row_skip, end_rowx=row_end)
        try:
            # Probe the first cell; raises when the column is not a date.
            datetime(*xlrd.xldate_as_tuple(Dates1[0], B.datemode))
            # NOTE(review): these differences subtract a value from
            # itself and are therefore always 0, so the datetime branch
            # below is always taken; kept as-is pending confirmation of
            # the original intent.
            dif1 = xlrd.xldate_as_tuple(Dates1[1],
                                        B.datemode)[3] - xlrd.xldate_as_tuple(
                                            Dates1[1], B.datemode)[3]
            dif2 = xlrd.xldate_as_tuple(Dates1[1],
                                        B.datemode)[4] - xlrd.xldate_as_tuple(
                                            Dates1[1], B.datemode)[4]
            try:
                dif3 = xlrd.xldate_as_tuple(
                    Dates1[1], B.datemode)[5] - xlrd.xldate_as_tuple(
                        Dates1[1], B.datemode)[5]
            except IndexError:
                dif3 = 0
            if dif1 == 0 and dif2 == 0 and dif3 == 0:
                Dates[CDate] = np.array([
                    datetime(*xlrd.xldate_as_tuple(i, B.datemode))
                    for i in Dates1
                ])
            else:
                Dates[CDate] = np.array([
                    datetime(*xlrd.xldate_as_tuple(i, B.datemode)).date()
                    for i in Dates1
                ])
        except (ValueError, TypeError, IndexError):
            # Non-date column: keep the raw cell values.
            # NOTE(review): stored in Data (not Dates), as in the
            # original -- confirm this is intentional.
            Data[CDate] = np.array(Dates1)
    # Exctracting Data columns as floats; non-numeric cells become NaN.
    for CData in colData:
        Data1 = S.col_values(CData, start_rowx=row_skip, end_rowx=row_end)
        Data2 = []
        for dat in Data1:
            try:
                Data2.append(float(dat))
            except ValueError:
                Data2.append(np.nan)
        Data2 = np.array(Data2)
        if flagnumNaN:
            # Bug fix: this previously assigned to the undefined name
            # 'Date2', raising NameError whenever num_NaN was used.
            Data2[np.where(Data2 == num_NaN)] = np.nan
        Data[CData] = Data2

    return Dates, Data, Header
# --- Ejemplo n.º 7 (Example 7) ---
def EDTXT(File,
          deli=',',
          colStr=(0, ),
          colData=(1, ),
          row_skip=1,
          flagHeader=True,
          rowH=0,
          row_end=0,
          str_NaN=None,
          num_NaN=None,
          dtypeData=float):
    '''
    DESCRIPTION:

        This function extract data series from a plain text file or a csv
        type file.
    _______________________________________________________________________

    INPUT:
        :param File:       A str, File that needs to be open with
                                  extention (anything np.genfromtxt
                                  accepts).
        :param deli:       A str, Delimiter of the data. Defaulted to ','.
        :param colStr:     A tuple, columns holding the string data.
                                    Defaulted to (0,). None to skip.
        :param colData:    A tuple, columns holding the float data.
                                    Defaulted to (1,). None to skip.
        :param row_skip:   An int, first data row. Defaulted to 1.
        :param flagHeader: A boolean, flag to get the header of the
                                      information; when False columns are
                                      keyed by their position.
        :param rowH:       An int, Header row.
        :param row_end:    An int, number of footer rows to skip.
        :param str_NaN:    A str, NaN string for data. Defaulted to None.
        :param num_NaN:    A number, NaN marker for data. Defaulted to
                                     None.
        :param dtypeData:  Data type of the numeric columns. Defaulted to
                           float.
    _______________________________________________________________________
    OUTPUT:
        - R: dict mapping each header (or column index) to its extracted
             column vector.
    '''
    # ----------------
    # Error Managment
    # ----------------
    if not isinstance(colStr, (tuple, list)) and colStr is not None:
        utl.ShowError('EDTXT', 'EMSD.Extract_Data',
                      'colStr not in tuple or list')
        raise TypeError
    elif not isinstance(colStr, tuple) and colStr is not None:
        colStr = tuple(colStr)
    if not isinstance(colData, (tuple, list)) and colData is not None:
        utl.ShowError('EDTXT', 'EMSD.Extract_Data',
                      'colData not in tuple or list')
        raise TypeError
    elif not isinstance(colData, tuple) and colData is not None:
        # Bug fix: this guard previously tested colStr instead of colData,
        # so list-valued colData was never converted to a tuple.
        colData = tuple(colData)

    # Verify values
    if num_NaN is None:
        flagnumNaN = False
    else:
        flagnumNaN = True
        if not isinstance(num_NaN, str):
            num_NaN = float(num_NaN)
    flagstrNaN = str_NaN is not None

    # -------------------
    # Extracting Values
    # -------------------
    # Headers: one row (rowH) restricted to the requested columns.  The
    # eight near-identical genfromtxt branches are collapsed into one
    # call; this also fixes the no-header branch, which passed the wrong
    # (always-None) usecols and therefore read every column.
    if colStr is not None and colData is not None:
        usecols = colStr + colData
    elif colStr is not None:
        usecols = colStr
    else:
        usecols = colData  # may be None: genfromtxt then reads all columns
    if not flagHeader:
        rowH = 0
    Headers = np.genfromtxt(File,
                            dtype=str,
                            usecols=usecols,
                            skip_header=rowH,
                            delimiter=deli,
                            max_rows=1)
    if not flagHeader:
        # Without a header row, key the columns by their position.
        Headers = np.arange(0, len(Headers))

    R = dict()

    # String Data
    if colStr is not None:
        DataStr = np.genfromtxt(File,
                                dtype=str,
                                usecols=colStr,
                                delimiter=deli,
                                skip_header=row_skip,
                                skip_footer=row_end,
                                unpack=True,
                                encoding='utf-8')
        if flagstrNaN:
            DataStr[DataStr == str_NaN] = 'nan'
        if len(colStr) == 1:
            R[Headers[0]] = DataStr
        else:
            for icol, col in enumerate(colStr):
                R[Headers[icol]] = DataStr[icol]

    # Numeric Data
    if colData is not None:
        Data = np.genfromtxt(File,
                             dtype=dtypeData,
                             usecols=colData,
                             delimiter=deli,
                             skip_header=row_skip,
                             skip_footer=row_end,
                             unpack=True)
        if flagnumNaN:
            Data[Data == num_NaN] = np.nan

        if len(colData) == 1:
            if colStr is None:
                # Bug fix: previously indexed Headers with a possibly
                # undefined loop variable (icol).
                R[Headers[0]] = Data
            else:
                # Bug fix: the first data header sits at index len(colStr)
                # (0-based, after the string headers); len(colStr) + 1 was
                # out of range for a single data column.
                R[Headers[len(colStr)]] = Data
        else:
            for icol, col in enumerate(colData):
                if colStr is None:
                    R[Headers[icol]] = Data[icol]
                else:
                    R[Headers[len(colStr) + icol]] = Data[icol]

    elif colStr is None:
        # Neither column set given: extract every column as numeric data.
        Data = np.genfromtxt(File,
                             dtype=dtypeData,
                             delimiter=deli,
                             skip_header=row_skip,
                             skip_footer=row_end,
                             unpack=True)
        if flagnumNaN:
            Data[Data == num_NaN] = np.nan
        for icol, col in enumerate(Headers):
            R[Headers[icol]] = Data[icol]

    return R
# --- Ejemplo n.º 8 (Example 8) ---
def St_Document(Pathout='',
                Name='Stations_Info',
                St_Info_Dict=None,
                Data_Flags=None):
    '''
    DESCRIPTION:

        This function saves the station information in an Excel (.xlsx)
        worksheet.
    _______________________________________________________________________

    INPUT:
        + Pathout: Saving directory.
        + Name: File Name.
        + St_Info_Dict: Dictionary with the information of the stations.
                        It must have the following information:
                        CODE: Station code.
                        NAME: Station name.
                        ELEVATION: Station Elevation.
                        LATITUDE: Station latitude.
                        LONGITUDE: Station Longitude.
        + Data_Flags: Possible flags that the data has, defaulted to None.
    _______________________________________________________________________

    OUTPUT:

        Return a document.
    '''
    # NOTE(review): `self` is referenced below although this is a
    # module-level function (the code was likely extracted from a
    # class); the `self.*` fallbacks only work when St_Info_Dict /
    # Data_Flags are supplied or when this lives inside that class.
    if St_Info_Dict is None and self.St_Info is None:
        Er = utl.ShowError('St_Document', 'EDSM', 'No station data added')
        return
    elif St_Info_Dict is None:
        St_Info_Dict = self.St_Info

    keys = ['CODE', 'NAME', 'ELEVATION', 'LATITUDE', 'LONGITUDE']

    def _coord(value, positive, negative):
        # Convert hemisphere-suffixed coordinate strings to signed
        # floats; values without a recognized suffix pass through
        # unchanged (same as the original loop).
        # NOTE(review): assumes the numeric part is the first 5
        # characters of a fixed-width field -- confirm with the data.
        if value[-1] == positive:
            return float(value[:5])
        if value[-1] == negative:
            return float('-' + value[:5])
        return value

    St_Info_Dict['LATITUDE'] = [_coord(v, 'N', 'S')
                                for v in St_Info_Dict['LATITUDE']]
    St_Info_Dict['LONGITUDE'] = [_coord(v, 'E', 'W')
                                 for v in St_Info_Dict['LONGITUDE']]

    Nameout = Pathout + Name + '.xlsx'
    W = xlsxwl.Workbook(Nameout)
    # Stations Sheet
    WS = W.add_worksheet('STATIONS')
    # Cell formats (shared border/font settings factored out).
    _border = {'font_name': 'Arial', 'font_size': 11,
               'top': 1, 'bottom': 1, 'right': 1, 'left': 1}
    Title = W.add_format(dict(_border, bold=True, align='center',
                              valign='vcenter'))
    Data_Format = W.add_format(dict(_border, bold=False, align='left',
                                    valign='vcenter'))
    # Column Formats (exact duplicate set_column(1, 3, ...) removed).
    # NOTE(review): these ranges overlap, so later calls override the
    # earlier widths -- kept as in the original; confirm intent.
    WS.set_column(1, 3, 20.0)
    WS.set_column(1, 4, 15.0)
    WS.set_column(1, 5, 15.0)
    # Titles
    WS.write(1, 1, 'CODE', Title)
    WS.write(1, 2, 'NAME', Title)
    WS.write(1, 3, 'ELEVATION', Title)
    WS.write(1, 4, 'LATITUDE (N)', Title)
    WS.write(1, 5, 'LONGITUDE (E)', Title)

    # Station data, one column per key, starting below the title row.
    Col = 1
    for key in keys:
        Row = 2
        for dat in St_Info_Dict[key]:
            WS.write(Row, Col, dat, Data_Format)
            Row += 1
        Col += 1

    # Flags sheet.
    if Data_Flags is None:
        if self.FlagM is None:
            W.close()
            return
        Data_Flags = self.FlagM
    # BUG FIX: the original wrote the FLAGS sheet only when the flags
    # came from self.FlagM; an explicitly passed Data_Flags argument
    # was silently dropped.
    WS = W.add_worksheet('FLAGS')
    WS.write(1, 1, 'FLAGS', Title)
    WS.set_column(1, 1, 20.0)
    Row = 2
    for flag in Data_Flags:
        WS.write(Row, 1, flag, Data_Format)
        Row += 1
    W.close()
    return
Ejemplo n.º 9
0
    def __init__(self, PathData, Stations=None):
        '''
        DESCRIPTION:
            Class Constructor.

        INPUT:
            :param PathData: A str, directory holding one folder per
                             station with the raw .txt files.
            :param Stations: A str, a list of str, or None; station(s)
                             to open.  None loads every folder found
                             in PathData.
        '''
        # -------------------------
        # Error Managment
        # -------------------------
        # Identity comparison with None instead of the ambiguous
        # `!= None`; a single isinstance call covers both valid types.
        if Stations is not None and not isinstance(Stations, (list, str)):
            r = utl.ShowError('OpenWundergrounds', '__init__',
                              'Erroneus type for Station information')
            raise TypeError
        # -------------------------
        # Parameters
        # -------------------------
        self.PathData = PathData

        self.deli = ','  # Delimeter

        # Important Variables
        # String Variables
        self.LabelsStr = np.array(['Time'])
        self.LabelsData = np.array([
            'TemperatureC', 'DewpointC', 'PressurehPa', 'WindSpeedKMH',
            'WindDirectionDegrees', 'HourlyPrecipMM',
            'SolarRadiationWatts/m^2', 'Humidity'
        ])

        # Aggregation operation per raw variable.  The original code
        # repeated this mapping four times; only LabDataOper2 differs
        # (it accumulates solar radiation instead of averaging it).
        _base_oper = {
            'TemperatureC': 'mean',
            'DewpointC': 'mean',
            'PressurehPa': 'mean',
            'WindSpeedKMH': 'mean',
            'WindDirectionDegrees': 'mean',
            'HourlyPrecipMM': 'sum',
            'SolarRadiationWatts/m^2': 'mean',
            'Humidity': 'mean'
        }
        self.LabDataOper = dict(_base_oper)
        self.LabDataOper1 = dict(_base_oper)
        self.LabDataOper2 = dict(_base_oper)
        self.LabDataOper2['SolarRadiationWatts/m^2'] = 'sum'
        self.LabDataOper3 = dict(_base_oper)

        # Short column names used when saving each variable.
        self.LabDataSave = {
            'TemperatureC': 'TC',
            'DewpointC': 'Td',
            'PressurehPa': 'PresC',
            'WindSpeedKMH': 'WSC',
            'WindDirectionDegrees': 'WDC',
            'HourlyPrecipMM': 'PrecC',
            'SolarRadiationWatts/m^2': 'RSC',
            'Humidity': 'HRC'
        }

        # Upper/lower elimination thresholds, initially unset (None).
        _short_labels = list(self.LabDataSave.values())
        self.ElimOver = dict.fromkeys(_short_labels)
        self.ElimLow = dict.fromkeys(_short_labels)

        # Plot labels (Spanish), with and without units, and colors.
        self.LabelsWithUnits = {
            'TC': 'Temperatura [°C]',
            'Td': 'Punto de Rocio [°C]',
            'PresC': 'Presión [hPa]',
            'WSC': 'Vel. Viento [m/s]',
            'WDC': 'Dirección del Viento [Grados]',
            'PrecC': 'Precipitación [mm]',
            'RSC': r'Radiación Solar [W/m$^2$]',
            'HRC': 'Hum. Rel. [%]'
        }

        self.LabelsNoUnits = {
            'TC': 'Temperatura',
            'Td': 'Punto de Rocio',
            'PresC': 'Presión',
            'WSC': 'Vel. Viento',
            'WDC': 'Dirección del Viento',
            'PrecC': 'Precipitación',
            'RSC': r'Radiación Solar',
            'HRC': 'Hum. Rel.'
        }

        self.LabelsColors = {
            'TC': 'r',
            'Td': 'r',
            'PresC': 'k',
            'WSC': 'k',
            'WDC': 'k',
            'PrecC': 'b',
            'RSC': 'y',
            'HRC': 'g'
        }
        # -------------------------
        # Get Stations
        # -------------------------
        if Stations is None:
            Stations = utl.GetFolders(PathData)
        elif isinstance(Stations, str):
            Stations = [Stations]

        # Stations
        self.Stations = Stations
        self.Arch = dict()
        for St in Stations:
            self.Arch[St] = gl.glob(PathData + St + '/*.txt')
            if len(self.Arch[St]) == 0:
                print('In Station', St, 'No data was found, review station')

        return
Ejemplo n.º 10
0
def CiclD(Var, Years=None, Dates=None, DTH=24, flagZeros=False):
    '''
    DESCRIPTION:

        This function calculates the diurnal cycle of a variable from the
        hourly data of a time series; it obtains the diurnal cycle per
        month, per trimester and the overall mean cycle.
    _____________________________________________________________________

    INPUT:

        :param Var:       A list or ndarray, Variable that needs to be
                                             treated.
        :param Years:     A list or ndarray, [beginning year, ending year].
        :param Dates:     A list or ndarray, dates as strings
                                             (yyyy/mm/dd HHMM) or as
                                             datetime objects.
        :param DTH:       An int, number of records that complete one
                                  day, defaulted to 24.
        :param flagZeros: A boolean, True to treat zeros as missing.
    _____________________________________________________________________

    OUTPUT:

        :return Results: A dict, with:
            MonthsM:  data per month.
            MonthsMM: mean per month.
            MonthsME: mean error per month.
            CiDT:     mean complete diurnal cycle.
            ErrT:     mean errors.
            DesT:     standard deviation.
            TriM/TriMM/TriMD/TriME: the same per trimester.
    '''
    # Errors: identity comparison is required because Years/Dates may be
    # ndarrays, where `== None` broadcasts element-wise.
    if Years is None and Dates is None:
        Er = utl.ShowError('CicloD', 'An_Hydro',
                           'No dates nor years were added')
        return
    FlagYears = Dates is None

    # Result containers
    MonthsM = dict()   # Data per month
    MonthsMM = dict()  # Mean data
    MonthsMD = dict()  # Standard deviation
    MonthsME = dict()  # Mean error
    TriM = dict()      # Trimester data
    TriMM = dict()     # Trimester mean
    TriMD = dict()     # Trimester standard deviation
    TriME = dict()     # Trimester mean error

    # Dates
    if FlagYears:
        Yi = int(Years[0])
        Yf = int(Years[1])
        dtm = timedelta(0, 24 / DTH * 3600)
        DateI = datetime(Yi, 1, 1, 0, 0)
        # BUG FIX: the original overwrote DateI with the end date and
        # then referenced the undefined name DateE, raising NameError
        # whenever the Years path was taken.
        DateE = datetime(Yf, 12, 31, 23, 59)
        Date = DUtil.Dates_Comp(DateI, DateE, dtm=dtm)
    elif isinstance(Dates[0], str):
        Date = DUtil.Dates_str2datetime(Dates)
    else:
        Date = Dates

    Months = np.array([d.month for d in Date])
    # Work on a float copy so NaN can be assigned (fails on int arrays)
    # and so the caller's array is never mutated (the original modified
    # Var in place when flagZeros was set).
    Var = np.array(Var, dtype=float)
    if flagZeros:
        Var[Var == 0] = np.nan

    # Monthly diurnal cycles: one (days x DTH) matrix per month.
    for month in range(1, 13):
        sel = np.where(Months == month)
        MonthsM[month] = np.reshape(Var[sel], (-1, DTH))
        MonthsMM[month], MonthsMD[month], MonthsME[month] = MeanError(
            MonthsM[month], axis=0)

    # Trimester diurnal cycles (DJF, MAM, JJA, SON).
    Trimes = {1: [12, 1, 2], 2: [3, 4, 5], 3: [6, 7, 8], 4: [9, 10, 11]}
    for tri in range(1, 5):
        sel = np.isin(Months, Trimes[tri])
        TriM[tri] = np.reshape(Var[sel], (-1, DTH))
        TriMM[tri], TriMD[tri], TriME[tri] = MeanError(TriM[tri], axis=0)

    # Overall mean diurnal cycle.
    VarM = np.reshape(np.copy(Var), (-1, DTH))
    CiDT, DesT, ErrT = MeanError(VarM, axis=0)
    Results = {
        'MonthsM': MonthsM,
        'MonthsMM': MonthsMM,
        'MonthsME': MonthsME,
        'CiDT': CiDT,
        'ErrT': ErrT,
        'DesT': DesT,
        'TriM': TriM,
        'TriMM': TriMM,
        'TriMD': TriMD,
        'TriME': TriME
    }

    return Results
Ejemplo n.º 11
0
def EDnetCDFFile(File, VarDict=None, VarRangeDict=None):
    '''
    DESCRIPTION:

        With this function the information of a netCDF type file can
        be extracted.
    _______________________________________________________________________

    INPUT:
        + File: File that would be extracted including the path.
        + VarDict: List of variables that would be extracted from the
                   netCDF file. Defaulted to None (return all variables).
        + VarRangeDict: Range of data that would be extracted per
                        variable (dimension name -> [start, stop]).
                        Defaulted to None to extract the full range;
                        dimensions missing from the dict get their
                        full extent.
    _______________________________________________________________________

    OUTPUT:
        - Data: Extracted Data Dictionary.
    '''
    # Imported lazily so the rest of the module works without netCDF4.
    try:
        import netCDF4 as nc
    except ImportError:
        Er = utl.ShowError(
            'EDNCFile', 'EDSM',
            'netCDF4 not installed, please install the library to continue')
        raise Er

    # Open File
    dataset = nc.Dataset(File)

    if VarDict is None:
        # The live variables mapping is returned, so the dataset is
        # deliberately left open for lazy reads by the caller.
        Data = dataset.variables
    else:
        Data = dict()
        for Var in VarDict:
            try:
                P = dataset.variables[Var]
            except KeyError:
                Er = utl.ShowError('EDNCFile', 'EDSM',
                                   'Key %s not in the nc file.' % Var)
                raise Er
            if VarRangeDict is None:
                if Var == 'time':
                    Data[Var] = nc.num2date(P[:], P.units, P.calendar)
                else:
                    Data[Var] = P[:]
            else:
                dimensions = P.dimensions  # Dimension names, in order
                # Requested range per dimension; default to the full
                # extent when the dimension is absent from VarRangeDict.
                Range = dict()
                for VarR in dimensions:
                    try:
                        Range[VarR] = VarRangeDict[VarR]
                    except KeyError:
                        Range[VarR] = [0, dataset.variables[VarR].shape[0]]
                # BUG FIX: the original chained `[slice(..)][slice(..)]`,
                # which slices axis 0 repeatedly instead of successive
                # axes.  A tuple of slices indexes one axis per
                # dimension, and works for any rank (the original only
                # handled up to 5 dimensions).
                idx = tuple(slice(Range[d][0], Range[d][1])
                            for d in dimensions)
                if Var == 'time':
                    Data[Var] = nc.num2date(P[:], P.units, P.calendar)[idx]
                else:
                    Data[Var] = P[idx]

        dataset.close()

    return Data
Ejemplo n.º 12
0
def Ca_E(FechaC,V1C,dt=24,escala=1,op='mean',flagMa=False,flagDF=False,flagNaN=True):
    '''
    DESCRIPTION:

        Change the temporal scale of a complete time series by
        aggregating it.  Missing data must be given as NaN.
    _______________________________________________________________________

    INPUT:
        + FechaC: Dates formatted as 'yyyy/mm/dd - HHMM'; the '/' may be
                  any character.  Must be a string vector covering whole
                  years.
        + V1C: Variable to be rescaled.
        + dt: Number of input records aggregated into one output value
              (use 1 when monthly output is requested).
        + escala: Target scale:
                -1: minute data.
                0: hourly.
                1: daily.
                2: monthly (input must already be daily).
        + op: Aggregation operation, currently:
              'mean': average.
              'sum': total.
        + flagMa: True to also return the maxima and minima.
        + flagDF: True to also return the data-availability fractions
                  per month (only meaningful for daily input).
        + flagNaN: True to reject windows with too many missing values.
    _______________________________________________________________________

    OUTPUT:
        - FechaEs: New rescaled dates (strings).
        - FechaNN: New rescaled dates (date/datetime objects).
        - VE: Rescaled variable.
        - VEMax: Maxima vector (if flagMa).
        - VEMin: Minima vector (if flagMa).
    '''
    # Warnings are silenced because aggregating all-NaN windows makes
    # numpy emit expected RuntimeWarnings, not actual errors.
    warnings.filterwarnings('ignore')

    if escala > 2:
        utl.ShowError('EMSD','Ca_E','Todavía no se han programado estas escalas')

    # -------------------------------------------
    # Initialization
    # -------------------------------------------
    FechaNN = ["" for k in range(1)]
    FechaEs = ["" for k in range(1)]
    VE = []
    VEMax = []
    VEMin = []

    # NOTE(review): despite the original names, NF holds the fraction of
    # *available* data and NNF the missing fraction -- kept as-is.
    NF = []
    NNF = []
    rr = 0

    Oper = {'sum':np.nansum,'mean':np.nanmean}

    # -------------------------------------------
    # Date vector
    # -------------------------------------------
    yeari = int(FechaC[0][0:4])  # Initial year
    yearf = int(FechaC[len(FechaC)-1][0:4])  # Final year
    Sep = FechaC[0][4]  # Date separator character
    if isinstance(FechaC[0],str):
        DatesO = DUtil.Dates_str2datetime(FechaC)
    else:
        DatesO = FechaC

    # Output date vectors for each target scale.
    if escala == -1:
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
        dtm = timedelta(0,dt*60)
        FechaNN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
        FechaEs = DUtil.Dates_datetime2str(FechaNN)
    elif escala == 0:
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
        dtm = timedelta(0,60*60)
        FechaNN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
        FechaEs = DUtil.Dates_datetime2str(FechaNN)
    elif escala == 1:
        # Daily output dates.  (The original carried an unreachable
        # hourly branch inside this loop; escala == 0 is handled above.)
        for result in perdelta(date(int(yeari), 1, 1), date(int(yearf)+1, 1, 1), timedelta(days=1)):
            FR = result.strftime('%Y'+Sep+'%m'+Sep+'%d')
            if rr == 0:
                FechaNN[0] = result
                FechaEs[rr] = FR
            else:
                FechaNN.append(result)
                FechaEs.append(FR)
            rr += 1
    if escala == 2:
        # Monthly output dates.
        x = 0
        for i in range(int(yeari),int(yearf)+1):
            for j in range(1,13):
                if i == int(yeari) and j == 1:
                    FechaNN[0] = date(i,j,1)
                    FechaEs[0] = FechaNN[0].strftime('%Y'+Sep+'%m')
                else:
                    FechaNN.append(date(i,j,1))
                    FechaEs.append(FechaNN[x].strftime('%Y'+Sep+'%m'))
                x += 1
    # -------------------------------------------
    # Aggregation
    # -------------------------------------------
    if escala in (-1, 0, 1) and op in Oper:
        # The mean and sum branches of the original were identical
        # except for the aggregation function, so they are unified here
        # through the Oper lookup table.
        Agg = Oper[op]
        for i in range(0, len(V1C), dt):
            window = V1C[i:i + dt]
            nMiss = np.sum(np.isnan(window))
            nHave = np.sum(~np.isnan(window))
            # Reject windows with more than 30 % missing data (when
            # flagNaN is set) or with no data at all.
            if (nMiss > dt*0.30 and flagNaN) or nHave == 0:
                VE.append(np.nan)
                if flagMa:
                    VEMax.append(np.nan)
                    VEMin.append(np.nan)
            else:
                try:
                    VE.append(float(Agg(window)))
                except ValueError:
                    VE.append(np.nan)
                if flagMa:
                    try:
                        VEMax.append(float(np.nanmax(window)))
                    except ValueError:
                        VEMax.append(np.nan)
                    try:
                        VEMin.append(float(np.nanmin(window)))
                    except ValueError:
                        VEMin.append(np.nan)

    if escala == 2:
        # Group the daily records by 'year/month' labels.
        YearMonthData = np.array([str(i.year)+'/'+str(i.month) for i in DatesO])
        YearMonth = np.array([str(date(i,j,1).year)+'/'+str(date(i,j,1).month) for i in range(int(yeari),int(yearf)+1) for j in range(1,13)])
        VE = np.empty(YearMonth.shape)*np.nan
        VEMax = np.empty(YearMonth.shape)*np.nan
        VEMin = np.empty(YearMonth.shape)*np.nan

        NF = np.empty(YearMonth.shape)*np.nan
        NNF = np.empty(YearMonth.shape)*np.nan

        for iYM, YM in enumerate(YearMonth):
            x = np.where(YearMonthData == YM)[0]
            if len(x) != 0:
                q = sum(~np.isnan(V1C[x]))
                NF[iYM] = (q/len(x))
                # BUG FIX: the original read NF[-1] (the last, still-NaN
                # entry) instead of the value just written for this
                # month, so NNF was almost always NaN.
                NNF[iYM] = (1-NF[iYM])
                # A month needs at least 70 % valid days to be computed.
                if q >= round(len(x)*0.7,0) and flagNaN:
                    VE[iYM] = Oper[op](V1C[x])
                    VEMax[iYM] = np.nanmax(V1C[x])
                    VEMin[iYM] = np.nanmin(V1C[x])

    # -------------------------------------------
    # Results
    # -------------------------------------------
    if flagMa:
        if flagDF:
            return np.array(FechaEs), np.array(FechaNN), np.array(VE), np.array(VEMax), np.array(VEMin), np.array(NF),np.array(NNF)
        return np.array(FechaEs), np.array(FechaNN), np.array(VE), np.array(VEMax), np.array(VEMin)
    if flagDF:
        return np.array(FechaEs), np.array(FechaNN), np.array(VE),np.array(NF),np.array(NNF)
    return np.array(FechaEs), np.array(FechaNN), np.array(VE)
Ejemplo n.º 13
0
def CiclA(VMes, Years, flagA=False, oper='mean'):
    '''
    DESCRIPTION:
        Compute the multi-annual (monthly) cycle of a variable and,
        when requested, the annual series derived from the same data.
    _______________________________________________________________________

    INPUT:
        :param VMes:  A ndarray, Variable with the monthly data.
        :param Years: A list or ndarray, Vector with the initial and
                                         final year.
        :param flagA: A boolean, True to also compute the annual series.
        :param oper:  A str, Operation applied to obtain the annual data.
    _______________________________________________________________________

    OUTPUT:
        :return results: A dict with MesM/MesD/MesE and, when flagA is
                         True, also AnM/AnD/AnE.
    '''
    # --------------------
    # Error Managment
    # --------------------
    # The year vector must hold exactly [initial, final].
    if len(Years) > 2:
        return utl.ShowError(
            'CiclA', 'Hydro_Analysis',
            'Years index vector larger than 2, review vector')

    # --------------------
    # Years Managment
    # --------------------
    Yi = int(Years[0])
    Yf = int(Years[1])
    VarM = np.reshape(VMes, (-1, 12))

    # --------------------
    # Annual Cycle
    # --------------------
    # A month is reported only when at least 70 % of its years are
    # valid; otherwise it is set to NaN.
    MesM = np.empty(12)
    VarMNT = []
    for month, column in enumerate(VarM.T):
        valid = int(np.sum(~np.isnan(column)))
        VarMNT.append(valid)
        if valid >= round(len(column) * 0.70, 0):
            MesM[month] = np.nanmean(column)  # Multi-annual mean
        else:
            MesM[month] = np.nan

    MesD = np.nanstd(VarM, axis=0)  # Multi-annual standard deviation
    # Standard error of each month's mean.
    MesE = np.array([s / np.sqrt(VarMNT[m]) for m, s in enumerate(MesD)])

    # --------------------
    # Annual Series
    # --------------------
    if flagA:
        # Resolve the requested aggregation operation.
        Operation = DM.Oper_Det(oper)
        if Operation == -1:
            return -1
        AnM = np.empty(VarM.shape[0])
        AnMNT = []
        for year, row in enumerate(VarM):
            valid = int(np.sum(~np.isnan(row)))
            # A year needs more than 70 % valid months to be computed.
            if valid <= len(row) * 0.70:
                AnM[year] = np.nan
                AnMNT.append(np.nan)
            else:
                AnM[year] = Operation(row)
                AnMNT.append(valid)

        AnD = np.nanstd(VarM, axis=1)  # Annual deviation
        # Standard error of each annual value.
        AnE = np.array([s / np.sqrt(AnMNT[y]) for y, s in enumerate(AnD)])

    # Assemble the results dictionary.
    results = {'MesM': MesM, 'MesD': MesD, 'MesE': MesE}
    if flagA:
        results['AnM'] = AnM
        results['AnD'] = AnD
        results['AnE'] = AnE
    return results
Ejemplo n.º 14
0
def EDnetCDFFile(File,
                 VarDict=None,
                 VarRangeDict=None,
                 time='time',
                 DateI=None):
    '''
    DESCRIPTION:

        With this function the information of an netCDF type file can 
        be extracted.
    _______________________________________________________________________

    INPUT:
        :param File:         A str, File that would be extracted including 
                             the path.
        :param VarDict:      A dict, List of variables that would be 
                             extracted from the netCDF file. Defaulted 
                             to None.
        :param VarRangeDict: A dict, Range of data that would be extracted 
                             per variable. It is defaulted to None if all 
                             the Range wants to be extracted.
                             It must be a list with two values for each 
                             variable.
        :param time:         A str, key string of the time data in the
                             NetCDF file.
        :param DateI:        A date or datetime, object with the initial
                             date.
    _______________________________________________________________________
    
    OUTPUT:
        :return Data: A dict, Extracted Data Dictionary.    
    '''
    # netCDF4 is an optional dependency; report through the project's
    # error helper if it is missing.
    try:
        import netCDF4 as nc
    except ImportError:
        utl.ShowError(
            'EDNCFile', 'EDSM',
            'netCDF4 not installed, please install the library to continue')

    # Open File
    dataset = nc.Dataset(File)

    if VarDict is None:
        # NOTE: the dataset is deliberately left open on this path -- the
        # returned variables are lazy views that become unusable once the
        # dataset is closed.
        Data = dataset.variables
    else:
        Data = dict()
        for Var in VarDict:
            try:
                P = dataset.variables[Var]
            except KeyError:
                utl.ShowError('EDNCFile', 'EDSM',
                              'Key %s not in the nc file.' % Var)
            if VarRangeDict is None:
                if Var == time:
                    if dataset.variables[Var].calendar == '360':
                        # num2date cannot convert a 360-day calendar;
                        # rebuild monthly dates starting from DateI when
                        # the units are expressed in months.
                        MStr = re.compile('months')
                        MMatch = re.search(MStr, dataset.variables[Var].units)
                        Data[Var] = []
                        if not (MMatch is None):
                            if not (DateI is None):
                                NYears = int(
                                    len(dataset.variables[Var][:]) / 12)
                                for An in range(DateI.year,
                                                DateI.year + NYears):
                                    for M in range(1, 13):
                                        # Skip the months before the
                                        # initial date in the first year.
                                        # (fixed: condition used the
                                        # undefined name 'A')
                                        if M < DateI.month and An == DateI.year:
                                            continue
                                        Data[Var].append(date(An, M, 1))
                                Data[Var] = np.array(Data[Var])
                    else:
                        Data[Var] = nc.num2date(
                            dataset.variables[Var][:],
                            dataset.variables[Var].units,
                            dataset.variables[Var].calendar)
                else:
                    Data[Var] = dataset.variables[Var][:]
            else:
                dimensions = dataset.variables[Var].dimensions
                # Per-dimension range: the requested one, or the full span
                # of that dimension when it was not requested.
                Range = dict()
                for VarR in dimensions:
                    try:
                        Range[VarR] = VarRangeDict[VarR]
                    except KeyError:
                        Range[VarR] = [0, dataset.variables[VarR].shape[0]]

                Slices = tuple(
                    slice(Range[d][0], Range[d][1]) for d in dimensions)
                if Var == time and len(dimensions) == 1:
                    # Convert the full time axis first, then cut it.
                    Data[Var] = nc.num2date(
                        dataset.variables[Var][:],
                        dataset.variables[Var].units,
                        dataset.variables[Var].calendar)[Slices[0]]
                else:
                    # Apply every dimension's range in a single indexing
                    # operation.  (fixed: the previous per-axis
                    # assignments each re-read the full variable, so only
                    # the last dimension's range was actually applied;
                    # this also works for any number of dimensions)
                    Data[Var] = dataset.variables[Var][Slices]
        dataset.close()

    return Data
Ejemplo n.º 15
0
def PrecCount(Prec,DatesEv,dt=1,M=60):
    '''
    DESCRIPTION:
        
        This functions calculates the duration of precipitation events 
        from composites.
    _________________________________________________________________________

    INPUT:
        :param Prec:    A ndarray, Array with composite of precipitation.
        :param DatesEv: A ndarray, Array with all the events dates, format 
                                   yyyy/mm/dd-HHMM or datetime.
        :param dt:      An int, Time delta in minutes.
        :param M:       An int or list, Index where the maximum of 
                                precipitation is presented (one index per 
                                event when a list is given).
    _________________________________________________________________________

    OUTPUT:
        :return Results: A dict with the keys:
            DurPrec:    Precipitation duration in hours.
            TotalPrec:  Total of precipitation in that time.
            IntPrec:    Mean event intensity.
            IntPrecMax: Maximum event intensity.
            MaxPrec:    Maximum of precipitation during the event.
            Pindex:     Ratio between maximum and mean intensity.
            TasaPrec:   Precipitation rate from event start to maximum.
            DatesEvst:  Date where the event begins.
            DatesEvend: Date where the event ends.
            DatesEvMax: Date of the event maximum.
    '''
    # --------------------------------------
    # Dates
    # --------------------------------------
    # Manage Data Size: a 1-D DatesEv vector is one single event, a 2-D
    # array holds one event per row.
    if len(DatesEv.shape) == 1:
        if not(isinstance(DatesEv[0],str)): 
            E = utl.ShowError('PrecCount','MeteoFunctions','Not dates given, review format')
            raise E
        EvN = 1 # Events number
    else:
        if not(isinstance(DatesEv[0][0],str)): 
            E = utl.ShowError('PrecCount','MeteoFunctions','Not dates given, review format')
            raise E
        EvN = len(DatesEv) # Events number

    # Variables for beginning and end of each event
    DatesEvst_Aft = []
    DatesEvend_Aft = []
    for i in range(EvN):
        # Index of the event maximum (per event when M is a list).
        if isinstance(M,list):
            MP = M[i]
        else:
            MP = M
        x = [MP]
        # Precipitation below this threshold is treated as "no rain".
        if dt == 1:
            MinPrec = 0.001
        else:
            MinPrec = 0.10
        # Precipitation beginning: search backwards from the maximum for
        # the last dry stretch before the event.
        if EvN == 1:
            xm = np.where(Prec[:MP]<=MinPrec)[0]
        else:
            xm = np.where(Prec[i,:MP]<=MinPrec)[0]

        k = 1
        a = len(xm)-1
        I = 10
        while k == 1:   
            if dt == 1:
                if a == -1:
                    xmm = 0
                    k = 2
                    break
                while a-I < 0:
                    I -= 1
                # A run of I consecutive dry steps marks the event start.
                if xm[a] == xm[a-I]+I:
                    xmm = xm[a]
                    k = 2
                else:
                    a = a-1
                    if a == 0:
                        xmm = xm[0]
                        k = 2
            elif dt == 5:
                if a == -1:
                    xmm = 0
                    k = 2
                    break
                if xm[a] == xm[a-1]+1:
                    xmm = xm[a]
                    k = 2
                else:
                    a = a-1
                    if a == 0:
                        xmm = xm[0]
                        k = 2                       
        
        # Precipitation ending: search forward from the maximum for the
        # first dry stretch after the event.
        if EvN == 1:
            xM = np.where(Prec[x[0]+1:]<=MinPrec)[0]+x[0]+1
        else:
            xM = np.where(Prec[i,x[0]+1:]<=MinPrec)[0]+x[0]+1

        k = 1
        a = 0
        while k == 1:
            aa = len(xM)
            if aa == 1 or aa == 0:
                if EvN == 1:
                    xMM = len(Prec)-1
                else:
                    xMM = len(Prec[i,:])-1
                k = 2
                break
            if dt == 1:
                # Try a 10-step dry run first, then fall back to 5 steps
                # when the tail of xM is too short.
                try:
                    if xM[a] == xM[a+10]-10:
                        xMM = xM[a]
                        k = 2
                    else:
                        a = a+1
                        if a == len(xM)-1:
                            xMM = xM[len(xM)-1]
                            k = 2
                except IndexError:
                    try:
                        if xM[a] == xM[a+5]-5:
                            xMM = xM[a]
                            k = 2
                        else:
                            a = a+1
                            if a == len(xM)-1:
                                xMM = xM[len(xM)-1]
                                k = 2
                    except IndexError:
                        xMM = xM[a]
                        k = 2
                        
            elif dt == 5:
                if xM[a] == xM[a+1]-1:
                    xMM = xM[a]
                    k = 2
                else:
                    a = a+1
                    if a == len(xM)-1:
                        xMM = xM[len(xM)-1]
                        k = 2
            else:
                xMM = xM[a]
                k = 2
        if EvN == 1:
            DatesEvst_Aft.append(DatesEv[xmm])
            DatesEvend_Aft.append(DatesEv[xMM])
        else:
            DatesEvst_Aft.append(DatesEv[i][xmm])
            DatesEvend_Aft.append(DatesEv[i][xMM])
    
    DatesEvst = DUtil.Dates_str2datetime(DatesEvst_Aft,Date_Format=None)
    DatesEvend = DUtil.Dates_str2datetime(DatesEvend_Aft,Date_Format=None)
    DatesEvst_Aft = np.array(DatesEvst_Aft)
    DatesEvend_Aft = np.array(DatesEvend_Aft)
    
    # ---------------
    # Calculations
    # ---------------
    # Variables
    DurPrec = []
    TotalPrec = []
    IntPrec = []
    IntPrecMax = []
    MaxPrec = []
    Pindex = []
    TasaPrec = []
    DatesMax = []

    if EvN == 1:
        # Verify event data: at least 90 % of the composite must be valid.
        q = sum(~np.isnan(Prec))
        if q <= len(DatesEv)*.90:
            DurPrec.append(np.nan)
            TotalPrec.append(np.nan)
            IntPrec.append(np.nan)
            IntPrecMax.append(np.nan)
            MaxPrec.append(np.nan)
            Pindex.append(np.nan)
            TasaPrec.append(np.nan)
            DatesMax.append(np.nan)
        else:
            # ------------------------
            # Rainfall duration
            # ------------------------
            Dxi = np.where(DatesEv == DatesEvst_Aft)[0]
            Dxf = np.where(DatesEv == DatesEvend_Aft)[0]
            DurPrec.append((Dxf[0]-Dxi[0]+1)*dt/60) # Duration in hours
            # Verify there is enough information inside the event window.
            q = sum(~np.isnan(Prec[Dxi[0]:Dxf[0]+1]))
            if q <= len(Prec[Dxi[0]:Dxf[0]+1])*.50:
                DurPrec[-1] = np.nan
                TotalPrec.append(np.nan)
                IntPrec.append(np.nan)
                IntPrecMax.append(np.nan)
                MaxPrec.append(np.nan)
                Pindex.append(np.nan)
                TasaPrec.append(np.nan)
                DatesMax.append(np.nan)
            else:
                # ------------------------
                # Precipitation total
                # ------------------------
                TotalP = np.nansum(Prec[Dxi[0]:Dxf[0]+1])
                TotalPrec.append(TotalP)
                # -----------------------------
                # Mean Intensity precipitation
                # -----------------------------
                IntPrec.append(TotalP/DurPrec[-1])
                if IntPrec[-1] >= 100:
                    DurPrec[-1] = np.nan
                    TotalPrec[-1] = np.nan
                    IntPrec[-1] = np.nan
                    IntPrecMax.append(np.nan)
                    MaxPrec.append(np.nan)
                    Pindex.append(np.nan)
                    TasaPrec.append(np.nan)
                    DatesMax.append(np.nan)
                else:
                    # ------------------------
                    # Maximum Precipitation
                    # ------------------------
                    MaxPrec.append(np.nanmax(Prec[Dxi[0]:Dxf[0]+1]))
                    # -----------------------------
                    # Max Intensity precipitation
                    # -----------------------------
                    IntPrecMax.append(MaxPrec[-1]/(dt/60))
                    # ------------------------
                    # P Index
                    # ------------------------
                    Pindex.append(IntPrecMax[-1]/IntPrec[-1])
                    # ------------------------
                    # Dates Max 
                    # ------------------------
                    # (fixed: include the event's last step in the search,
                    # otherwise a maximum at the end raised IndexError)
                    x = np.where(Prec[Dxi[0]:Dxf[0]+1]==MaxPrec[-1])[0][-1]
                    DatesMax.append(DatesEv[Dxi[0]+x])
                    DatesMax[-1] = DUtil.Dates_str2datetime([DatesMax[-1]],Date_Format=None)[0]
                    # ------------------------
                    # Precipitation Rate
                    # ------------------------
                    TasaPrec.append((MaxPrec[-1]-Prec[Dxi[0]])/((x)*dt/60))

        DatesEvMax = np.array(DatesMax)[0]
        DurPrec = np.array(DurPrec)[0]
        TotalPrec = np.array(TotalPrec)[0]
        IntPrec = np.array(IntPrec)[0]
        IntPrecMax = np.array(IntPrecMax)[0]
        MaxPrec = np.array(MaxPrec)[0]
        Pindex = np.array(Pindex)[0]
        TasaPrec = np.array(TasaPrec)[0]

    else:
        for i in range(len(DatesEv)):
            # Verify event data
            q = sum(~np.isnan(Prec[i]))
            if q <= len(DatesEv[i])*.90:
                DurPrec.append(np.nan)
                TotalPrec.append(np.nan)
                IntPrec.append(np.nan)
                IntPrecMax.append(np.nan)
                MaxPrec.append(np.nan)
                Pindex.append(np.nan)
                TasaPrec.append(np.nan)
                DatesMax.append(np.nan)
            else:
                # ------------------------
                # Rainfall duration
                # ------------------------
                Dxi = np.where(DatesEv[i] == DatesEvst_Aft[i])[0]
                Dxf = np.where(DatesEv[i] == DatesEvend_Aft[i])[0]
                DurPrec.append((Dxf[0]-Dxi[0]+1)*dt/60) # Duration in hours
                # Verify there is enough information inside the event.
                q = sum(~np.isnan(Prec[i,Dxi[0]:Dxf[0]+1]))
                if q <= len(Prec[i,Dxi[0]:Dxf[0]+1])*.50:
                    DurPrec[-1] = np.nan
                    TotalPrec.append(np.nan)
                    IntPrec.append(np.nan)
                    IntPrecMax.append(np.nan)
                    MaxPrec.append(np.nan)
                    Pindex.append(np.nan)
                    TasaPrec.append(np.nan)
                    DatesMax.append(np.nan)
                else:
                    # ------------------------
                    # Precipitation total
                    # ------------------------
                    TotalP = np.nansum(Prec[i,Dxi[0]:Dxf[0]+1])
                    TotalPrec.append(TotalP)
                    # -----------------------------
                    # Mean Intensity precipitation
                    # -----------------------------
                    IntPrec.append(TotalP/DurPrec[-1])
                    if IntPrec[-1] >= 100:
                        DurPrec[-1] = np.nan
                        TotalPrec[-1] = np.nan
                        IntPrec[-1] = np.nan
                        IntPrecMax.append(np.nan)
                        MaxPrec.append(np.nan)
                        Pindex.append(np.nan)
                        TasaPrec.append(np.nan)
                        DatesMax.append(np.nan)
                    else:
                        # ------------------------
                        # Maximum Precipitation
                        # ------------------------
                        MaxPrec.append(np.nanmax(Prec[i,Dxi[0]:Dxf[0]+1]))
                        # -----------------------------
                        # Max Intensity precipitation
                        # -----------------------------
                        # (fixed: used a hard-coded 5-minute step instead
                        # of dt, inconsistent with the single-event branch)
                        IntPrecMax.append(MaxPrec[-1]/(dt/60))
                        # ------------------------
                        # P Index
                        # ------------------------
                        Pindex.append(IntPrecMax[-1]/IntPrec[-1])
                        # ------------------------
                        # Dates Max 
                        # ------------------------
                        # (fixed: include the event's last step in the
                        # search; see single-event branch)
                        x = np.where(Prec[i,Dxi[0]:Dxf[0]+1]==MaxPrec[-1])[0][-1]
                        DatesMax.append(DatesEv[i,Dxi[0]+x])
                        DatesMax[-1] = DUtil.Dates_str2datetime([DatesMax[-1]],Date_Format=None)[0]
                        # ------------------------
                        # Precipitation Rate
                        # ------------------------
                        TasaPrec.append((MaxPrec[-1]-Prec[i,Dxi[0]])/((x)*dt/60))

        DatesEvMax = np.array(DatesMax)
        DurPrec = np.array(DurPrec)
        TotalPrec = np.array(TotalPrec)
        IntPrec = np.array(IntPrec)
        IntPrecMax = np.array(IntPrecMax)
        MaxPrec = np.array(MaxPrec)
        Pindex = np.array(Pindex)
        TasaPrec = np.array(TasaPrec)


    Results = {'DurPrec':DurPrec,'TotalPrec':TotalPrec,'IntPrec':IntPrec,
    'MaxPrec':MaxPrec,'DatesEvst':DatesEvst,'DatesEvend':DatesEvend,
    'DatesEvMax':DatesEvMax,'Pindex':Pindex,'IntPrecMax':IntPrecMax,'TasaPrec':TasaPrec}
    return Results
Ejemplo n.º 16
0
def ConsDaysOverOrLower(Data,Dates,Value,Comparation='Over'):
    '''
    DESCRIPTION:
    
        This function calculates the number of consecutive days with 
        values over or lower a specific value and also gives the dates at
        the beginning and end of every season.
    _______________________________________________________________________

    INPUT:
        + Data: Data that needs to be counted.
        + Dates: Dates of the data, can be in datetime or string vector.
                 if the Dates are in a string vector it has to be in
                 yyyy/mm/dd.
        + Value: Value to search days over or lower.
        + Comparation: String with the comparation that is going to be 
                       done.
                       
                       It can recognize the following strings:

                             String           |       Interpretation 
                       'Over', 'over' or '>'  |             >
                       'Lower', 'lower' or '<'|             <
                              '>='            |             >=
                              '<='            |             <=
    _______________________________________________________________________
    
    OUTPUT:

        The output is the dictionary results with the following keys:

        - TotalNumDays: Vector with total number of consecutive days 
                        above or below the value.
        - TotalPrecCount: Vector with the total of values during the
                          different number of days, works with
                          precipitation, averages needs to be 
                          determined manually.
        - TotalDateB: Starting dates of the different events.
        - MaxNumDays: Maximum number of consecutive days above 
                      or below the value.
        - MaxPrecCount_MaxDay: Maximum values in the maximum day.
        - MaxNumDays_MaxPrec: Maximum days in the maximum values.
        - DateMaxDays: Beginning date of the maximum days count.
    '''
    # keys
    keys = ['TotalNumDays','TotalPrecCount','TotalDateB','MaxNumDays',\
        'MaxPrecCount','DateMaxDays','MaxPrecCount_MaxDay','MaxNumDays_MaxPrec']
    # Determine operation
    Comp = utl.Oper_Det(Comparation)
    # ----------------
    # Error managment
    # ----------------
    if Comp == -1:
        return -1
    # Dates Error managment
    if isinstance(Dates[0],str):
        DatesP = DUtil.Dates_str2datetime(Dates)
        if list(DatesP)[0] == -1:
            return -1
        elif isinstance(DatesP[0],datetime):
            Er = utl.ShowError('DaysOverOrLower','Hydro_Analysis','Dates are not in days')
            return Er
    else:
        DatesP = Dates
        if isinstance(DatesP[0],datetime):
            Er = utl.ShowError('DaysOverOrLower','Hydro_Analysis','Dates are not in days')
            return Er
    # --------------
    # Calculations
    # --------------
    results = dict()
    results['TotalNumDays'] = [] # List with all the days
    results['TotalPrecCount'] = [] # List for the total
    results['TotalDateB'] = [] # List of dates
    x = np.where(Comp(Data,Value))[0]
    if len(x) > 1:
        # Walk the matching indices and close a run every time
        # consecutiveness breaks (and once more at the end of the vector).
        # (fixed: the previous loop dropped the first day of every run
        # after the first one, recorded the wrong beginning dates when
        # the gap between runs exceeded one day, and appended a spurious
        # zero-length run when a run ended at the last index)
        start = 0 # Position in x where the current run begins
        for i in range(1,len(x)+1):
            if i == len(x) or x[i] != x[i-1]+1:
                run = x[start:i]
                results['TotalNumDays'].append(len(run))
                results['TotalPrecCount'].append(sum(Data[j] for j in run))
                results['TotalDateB'].append(DatesP[run[0]])
                start = i

        results['TotalNumDays'] = np.array(results['TotalNumDays'])
        # Maximum number of days
        results['MaxNumDays'] = np.max(results['TotalNumDays'])
        x = np.where(results['TotalNumDays'] == results['MaxNumDays'])[0]

        results['TotalPrecCount'] = np.array(results['TotalPrecCount'])
        # Maximum value counter of number of days that was max
        results['MaxPrecCount_MaxDay'] = np.max(results['TotalPrecCount'][x])

        # Maximum value in those days
        results['MaxPrecCount'] = np.max(results['TotalPrecCount'])
        x = np.where(results['TotalPrecCount'] == results['MaxPrecCount'])[0]
        # Maximum number of days of the maximum value
        results['MaxNumDays_MaxPrec'] = np.max(results['TotalNumDays'][x])

        # Beginning date of the maximum 
        results['TotalDateB'] = np.array(results['TotalDateB'])
        xx = np.where(results['TotalNumDays'] == results['MaxNumDays'])[0]
        results['DateMaxDays'] = results['TotalDateB'][xx]
    else:
        # Fewer than two matching days: return zeroed results.
        for ikey,key in enumerate(keys):
            if ikey > 2:
                results[key] = 0
            else:
                results[key] = np.array([0])

    return results
Ejemplo n.º 17
0
def DaysOverOrLower(Data,Dates,Value,flagMonths=False,Comparation='Over'):
    '''
    DESCRIPTION:

        Counts, for every year (or every month of every year) of the
        series, how many days satisfy a comparison against a given value.
    _______________________________________________________________________

    INPUT:
        + Data: Data that needs to be counted.
        + Dates: Dates of the data, either datetime objects or strings in
                 yyyy/mm/dd format.
        + Value: Threshold value for the comparison.
        + flagMonths: True to count per month, False to count per year.
        + Comparation: Comparison to apply. Recognized strings:
                       'Over'/'over'/'>', 'Lower'/'lower'/'<', '>=', '<='.
    _______________________________________________________________________
    
    OUTPUT:
    
        - results: number of days over or lower a value for every year or
                   month.
        - An: Dates where the operation was made.
    '''
    # Resolve the comparison operator; bail out on an unknown string.
    Comp = utl.Oper_Det(Comparation)
    if Comp == -1:
        return -1, -1
    # Normalize dates; series with sub-daily resolution are rejected.
    if isinstance(Dates[0],str):
        DatesP = DUtil.Dates_str2datetime(Dates)
        if list(DatesP)[0] == -1:
            return -1, -1
        elif isinstance(DatesP[0],datetime):
            Er = utl.ShowError('DaysOverOrLower','Hydro_Analysis','Dates are not in days')
            return Er, Er
    else:
        DatesP = Dates
        if isinstance(DatesP[0],datetime):
            Er = utl.ShowError('DaysOverOrLower','Hydro_Analysis','Dates are not in days')
            return Er, Er
    # Group labels: 'yyyy/mm' for monthly counts, 'yyyy' for annual ones.
    Fmt = '%Y/%m' if flagMonths else '%Y'
    An = DUtil.Dates_datetime2str(DatesP,Date_Format=Fmt)
    if list(An)[0] == -1:
        return -1, -1
    # Build the ordered list of period labels to evaluate.
    Years = range(DatesP[0].year,DatesP[-1].year+1)
    if flagMonths:
        Labels = ['%d/%02d' % (Yr,Mo) for Yr in Years for Mo in range(1,13)]
    else:
        Labels = [str(Yr) for Yr in Years]
    # Count the matching days in each period.
    results = []
    for Lab in Labels:
        Sel = Data[np.where(An == Lab)[0]]
        Hits = np.where(Comp(Sel,Value))[0]
        # Require at least 70 % non-NaN coverage within the period.
        if sum(~np.isnan(Sel)) >= 0.70*len(Sel):
            results.append(len(Hits))
        else:
            results.append(np.nan)

    return results,An
Ejemplo n.º 18
0
def CompDC(Dates,V,DateI,DateE,dtm=None):
    '''
    DESCRIPTION:
    
        This function completes or cuts data to a specific date range.
    _____________________________________________________________________

    INPUT:
        :param Dates: Data date, it must be a string like this 
                      'Year/month/day' the separator '/' 
                      could be change with any character.  
                      It must be a string vector or a date or datetime vector.
        :param V:     Variable values, same length as Dates. 
        :param DateI: Initial Date in date or datetime format.
        :param DateE: Final Date in date or datetime format.
        :param dtm:   Time delta for the full data, if None it 
                      would use the timedelta from the 2 first values 
                      of the original data.
    _____________________________________________________________________
    
    OUTPUT:
        :return Results: A dict, Dictionary with the following results.
            DatesC: Complete date string vector.
            VC:     Filled data values.
            DatesN: Complete date Python datetime vector.
    '''
    
    V = np.array(V)
    Dates = np.array(Dates)
    # ---------------------
    # Error managment
    # ---------------------
    # datetime is a subclass of date, so (str, date) already covers all
    # three accepted input types; datetime kept for clarity.
    if not isinstance(Dates[0],(str,date,datetime)):
        Er = utl.ShowError('CompDC','EDSM','Bad format in dates')
        raise Er
    if len(Dates) != len(V):
        Er = utl.ShowError('CompDC','EDSM','Date and V are different length')
        raise Er
    if dtm is not None and not isinstance(dtm,timedelta):
        Er = utl.ShowError('CompDC','EDSM','Bad dtm format')
        raise Er

    # ---------------------
    # Dates Calculations
    # ---------------------
    # Original Dates
    if isinstance(Dates[0],str):
        DatesO = DUtil.Dates_str2datetime(Dates)
    else:
        DatesO = Dates

    if dtm is None:
        dtm = DatesO[1]-DatesO[0]
    # Complete date vector between DateI and DateE with step dtm.
    DatesN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
    
    # -------------------------------------------
    # Data Extraction
    # -------------------------------------------

    # Filled data: string input keeps a string container so the
    # element-wise assignment below does not fail.
    if isinstance(V[0],str):
        VC = (np.empty(len(DatesN))*np.nan).astype(str)
    else:
        VC = (np.empty(len(DatesN))*np.nan)
    
    DatesN = np.array(DatesN)
    DatesO = np.array(DatesO)
    # Place each original value at the position of its date inside the
    # complete vector; duplicated matches keep only the first slot.
    for iF,F in enumerate(DatesO):
        x = np.where(DatesN == F)[0]
        if len(x) == 1:
            VC[x] = V[iF]
        elif len(x) > 1:
            VC[x[0]] = V[iF]
    
    DatesC = DUtil.Dates_datetime2str(DatesN)

    Results = {'DatesC':DatesC,'DatesN':DatesN,'VC':VC}
    return Results
Ejemplo n.º 19
0
    def Dates_Comp(self, DateI, DateE, dtm=timedelta(1)):
        '''
        DESCRIPTION:
    
            This function creates a date or datetime vector from an initial 
            date to a ending date.          
        _______________________________________________________________________

        INPUT:
            :param DateI: A datetime value, Initial Date in date or datetime.
            :param DateE: A datetime value, Ending Date in date or datetime.
            :param dtm:   A timedelta, Time delta (defaults to 1 day).
        _______________________________________________________________________
        
        OUTPUT:
        
            :return DatesC: A ndarray, Complete date or datetime vector.
        '''
        # ---------------------
        # Constants
        # ---------------------
        flagH = False  # flag for the hours (set when DateI is a datetime)
        flagM = False  # flag for the minutes (set when dtm < 1 hour)
        # ---------------------
        # Error managment
        # ---------------------
        if isinstance(dtm, timedelta) == False:
            Er = utl.ShowError('Dates_Comp', 'DatesUtil', 'Bad dtm format')
            raise Er

        # NOTE(review): only dtm.seconds is inspected, so a datetime range
        # with a whole-day step (seconds == 0) is rejected here -- confirm
        # this restriction is intended.
        if isinstance(DateI, datetime) and dtm.seconds == 0:
            Er = utl.ShowError('Dates_Comp', 'DatesUtil',
                               'Bad time delta given')
            raise Er

        # Both endpoints must be dates (or datetimes) ...
        if isinstance(DateI, date) == False or isinstance(DateE,
                                                          date) == False:
            Er = utl.ShowError('Dates_Comp', 'DatesUtil',
                               'Bad DateI and DateE format')
            raise Er

        # ... and of the SAME kind: mixing date with datetime is rejected.
        if isinstance(DateI, datetime) and isinstance(DateE,
                                                      datetime) == False:
            Er = utl.ShowError('Dates_Comp', 'DatesUtil',
                               'Bad DateI and DateE format')
            raise Er

        if isinstance(DateI, datetime) == False and isinstance(
                DateE, datetime):
            Er = utl.ShowError('Dates_Comp', 'DatesUtil',
                               'Bad DateI and DateE format')
            raise Er
        # ---------------------
        # Generate series
        # ---------------------
        if isinstance(DateI, datetime):
            flagH = True

        yeari = DateI.year
        yearf = DateE.year
        # NOTE(review): monthi/monthf are computed but never used below.
        monthi = DateI.month
        monthf = DateE.month

        # The series starts with DateI itself; the loops below append the
        # following steps (Date <= DateI is explicitly skipped).
        DatesC = [DateI]
        Date = DateI
        for Y in range(yeari, yearf + 1):
            for m in range(1, 13):
                # Number of days in month m of year Y, via the distance to
                # the first day of the next month.
                Fi = date(Y, m, 1)
                if m == 12:
                    Ff = date(Y + 1, 1, 1)
                else:
                    Ff = date(Y, m + 1, 1)
                Difd = (Ff - Fi).days
                for d in range(Difd):
                    if flagH:
                        Dif = dtm.seconds
                        if Dif < 3600:
                            flagM = True
                        # Hour step in whole hours; sub-hour deltas fall
                        # back to a step of 1 hour with the minute loop
                        # below filling the gaps.
                        if int(Dif / 60 / 60) == 0:
                            dtt = 1
                        else:
                            dtt = int(Dif / 60 / 60)
                        for h in range(0, 24, dtt):
                            if flagM:
                                # DifM = Dif/60
                                # NOTE(review): the minute step int(Dif/60)
                                # is 0 when dtm < 60 s, which would make
                                # range() raise -- assumes dtm >= 1 minute.
                                for M in range(0, 60, int(Dif / 60)):
                                    if Date <= DateI or Date > DateE + dtm:
                                        Date += dtm
                                    else:
                                        DatesC.append(Date)
                                        Date += dtm
                            else:
                                if Date <= DateI or Date > DateE + dtm:
                                    Date += dtm
                                else:
                                    DatesC.append(Date)
                                    Date += dtm
                    else:
                        # NOTE(review): the daily branch uses >= while the
                        # hourly branches use > for the upper bound --
                        # confirm the off-by-one difference is intended.
                        if Date <= DateI or Date >= DateE + dtm:
                            Date += dtm
                        else:
                            DatesC.append(Date)
                            Date += dtm
        DatesC = np.array(DatesC)

        return DatesC
Ejemplo n.º 20
0
def CompD(Dates,V,dtm=None):
    '''
    DESCRIPTION:
    
        This function takes a data series and fill the missing dates with
        nan values, It would fill the entire year.
    _______________________________________________________________________

    INPUT:
        :param Dates: A list or ndarray, Data date, it must be a string 
                                         vector or a date or datetime 
                                         vector.
        :param V:     A list or ndarray, Variable that wants to be 
                                         filled. 
        :param dtm:   A timedelta, Time delta for the full data, 
                                         if None it would use the 
                                         timedelta from the 2 first 
                                         values of the original data.
    _______________________________________________________________________
    
    OUTPUT:
        :return Results: A dict, Dictionary with the following keys.
            DatesC: Complete date string vector.
            VC:     Filled data values (float ndarray, or a string
                    ndarray when V holds strings).
            DatesN: Complete date Python datetime vector.
    '''
    V = np.array(V)
    Dates = np.array(Dates)
    # ---------------------
    # Error managment
    # ---------------------

    if not isinstance(Dates[0],(str,date,datetime)):
        utl.ShowError('CompD','EDSM','not expected format in dates')
    if len(Dates) != len(V):
        utl.ShowError('CompD','EDSM','Date and V are different length')
    if dtm is not None and not isinstance(dtm,timedelta):
        utl.ShowError('CompD','EDSM','Bad dtm format')

    # Eliminate the errors in February: 02/29 in non-leap years and the
    # impossible 02/30.  A boolean mask over the ORIGINAL arrays is built
    # first and applied once, so indices never go stale (repeated
    # np.delete calls would shift the remaining rows and delete wrong
    # entries on the second and later removals).
    if isinstance(Dates[0],str):
        Dates2 = np.array([i[:10] for i in Dates])
        Bad = np.zeros(len(Dates),dtype=bool)
        # The +1 includes the last year (range stop is exclusive).
        for Y in range(int(Dates2[0][:4]),int(Dates2[-1][:4])+1):
            # February has 28 days in non-leap years.
            if (date(Y,3,1)-date(Y,2,1)).days == 28:
                Bad |= (Dates2 == '%s/02/29' %(Y))
            Bad |= (Dates2 == '%s/02/30' %(Y))
        Dates = Dates[~Bad]
        V = V[~Bad]

    # ---------------------
    # Dates Calculations
    # ---------------------
    # Original Dates
    if isinstance(Dates[0],str):
        DatesO = DUtil.Dates_str2datetime(Dates)
    else:
        DatesO = Dates
    if dtm is None:
        dtm = DatesO[1]-DatesO[0]
    # Complete Dates, covering the whole first-to-last year span.
    if isinstance(DatesO[0],datetime):
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
    else:
        DateI = date(DatesO[0].year,1,1)
        DateE = date(DatesO[-1].year,12,31)
    DatesN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
    # Filled data
    VC = np.empty(len(DatesN))*np.nan
    DatesN = np.array(DatesN)
    DatesO = np.array(DatesO)
    V = np.array(V)
    # Positions of the original dates inside the complete vector.
    x = np.searchsorted(DatesN,DatesO)

    try:
        VC[x] = V
    except ValueError:
        # V holds strings; switch to a string container and retry.
        VC = np.array(['' for i in range(len(DatesN))]).astype('<U20')
        VC[x] = V

    DatesC = DUtil.Dates_datetime2str(DatesN)

    Results = {'DatesC':DatesC,'DatesN':DatesN,'VC':VC}
    return Results
Ejemplo n.º 21
0
    def Dates_str2datetime(self, Dates, Date_Format=None, flagQuick=False):
        '''
        DESCRIPTION:
    
            From a string type vector the function converts the dates to 
            python date or datetime data from a given format, if no date
            format is specified the function looks up for a fitting format.
        _______________________________________________________________________

        INPUT:
            + Dates: String date vector that needs to be changed to date or
                     datetime vector.
            + Date_Format: Format of the dates given, it must be given in 
                           datetime string format like %Y/%m/%d %H%M.
                           Defaults to None (try the instance's candidate
                           format lists in order).
            + flagQuick: If True, parse by fixed string slicing instead of
                         strptime (faster for large vectors).
        _______________________________________________________________________
        
        OUTPUT:
        
            - DatesP: Python date or datetime format vector (date objects
                      when the strings carry no time part).

        Raises TypeError when Dates is not a string vector or when no
        candidate format fits.
        '''
        # ----------------
        # Error managment
        # ----------------
        if not isinstance(Dates[0], str):
            utl.ShowError('Dates_str2datetime', 'DatesUtil',
                          'Bad Dates format given, not in string format')
            raise TypeError
        # -------------------------
        # Temporal verification
        # -------------------------
        # Strings longer than 'YYYY/MM/DD' are assumed to carry a time part.
        flagHour = len(Dates[0]) > 10
        # -------------------------
        # Date_Format Verification
        # -------------------------
        if Date_Format is None:
            if flagHour:
                Date_Formats = self.DateTime_Formats
            else:
                Date_Formats = self.Date_Formats
        else:
            Date_Formats = [Date_Format]
        # -------------------------
        # Transformation
        # -------------------------
        for iF, F in enumerate(Date_Formats):
            try:
                if flagQuick:
                    if flagHour:
                        # NOTE(review): the minute slice [13:15] assumes a
                        # 'YYYY?MM?DD HHMM' layout with no separator
                        # between hour and minutes -- confirm with callers.
                        DatesP2 = np.array([
                            datetime(int(i[:4]), int(i[5:7]), int(i[8:10]),
                                     int(i[11:13]), int(i[13:15]))
                            for i in Dates
                        ])
                    else:
                        DatesP2 = np.array([
                            datetime(int(i[:4]), int(i[5:7]), int(i[8:10]), 0,
                                     0) for i in Dates
                        ])
                else:
                    DatesP2 = np.array(
                        [datetime.strptime(i, F) for i in Dates])
                break
            except (ValueError, TypeError):
                # This candidate format does not fit; give up only after
                # the last one has been tried.
                if iF == len(Date_Formats) - 1:
                    utl.ShowError('Dates_str2datetime', 'DatesUtil',
                                  'Bad date format, change format')
                    raise TypeError
                continue
        # -------------------------
        # Changing Dates
        # -------------------------
        # Drop the (zero) time part when the input carried no hour.
        if not flagHour:
            DatesP = np.array([i.date() for i in DatesP2])
        else:
            DatesP = DatesP2

        return DatesP
Ejemplo n.º 22
0
    def FF(self, xdata, ydata, F='lineal', alpha=0.05, flagParabolic=False):
        '''
        DESCRIPTION:
        
            This function takes x and y data and makes a fitting of the data with
            the function that wants to be added.
        _______________________________________________________________________

        INPUT:
            :param xdata:         A ndArray, Data in the x axis.
            :param ydata:         A ndArray, Data in the y axis.
            :param F:             A str, Function that wants to be fitted, 
                                  by default is 'lineal'. An empty string
                                  (or 0) tries every registered function
                                  and keeps the best R^2.
            :param alpha:         A float, significance level.
            :param flagParabolic: A boolean, flag to adjust Parabolic in 
                                  automatic.

                 The functions that are defaulted to fit are the following:

        _______________________________________________________________________
        
        OUTPUT:
            This function return a dictionary with the following data:
                - Coef: Ceofficients
                - perr: Standard deviation errors of the parameters.
                - R2: Coefficient of determination.
                Plus Functionkey/Function/FunctionEq, ConInt (parameter
                confidence intervals) and the error measures EErr, RSME,
                MBE, MPE.
        '''
        # --------------------------------
        # Error Managment and Parameters
        # --------------------------------

        if isinstance(F, str) == False and F != 0:
            utl.ShowError('FF', 'CFitting',
                          'Given parameter F is not on the listed functions.')

        keys = list(self.Adj)
        # Empty F / 0 means: try every registered function, keep the best.
        if F == '' or F == 0:
            flagfitbest = True
        else:
            key = F.lower()
            flagfitbest = False
            try:
                fun = self.Adj[key]
            except KeyError:
                utl.ShowError(
                    'FF', 'CFitting',
                    'Given parameter F is not on the listed functions.')

        # -----------------
        # Calculations
        # -----------------
        # Drop NaN pairs before fitting.
        X, Y = DM.NoNaN(xdata, ydata, False)
        Results = dict()

        if flagfitbest:
            CoefT = dict()
            perrT = dict()
            keysT = []
            R2T = []
            keys2 = []

            for ikey, key in enumerate(keys):
                # Parabolic fits are opt-in via flagParabolic.
                if not (flagParabolic) and key == 'parabolic':
                    continue
                try:
                    keys2.append(key)
                    fun = self.Adj[key]
                    # Fitting
                    Coef, pcov = curve_fit(fun, X, Y)
                    # R2 calculations
                    ss_res = np.dot((Y - fun(X, *Coef)), (Y - fun(X, *Coef)))
                    ymean = np.mean(Y)
                    ss_tot = np.dot((Y - ymean), (Y - ymean))
                    R2T.append(1 - (ss_res / ss_tot))

                    perrT[key] = np.sqrt(np.diag(pcov))
                    CoefT[key] = Coef
                except RuntimeError:
                    # curve_fit failed to converge for this function; keep
                    # NaN placeholders so indices in keys2/R2T stay aligned.
                    print('WARNING: Cannot fit a', key)
                    perrT[key] = np.nan
                    CoefT[key] = np.nan
                    R2T.append(np.nan)
                    continue

            # Verify the maximum R^2
            x = np.where(np.array(R2T) == np.nanmax(np.array(R2T)))[0]
            if len(x) > 1:
                x = x[0]
            key = np.array(keys2)[x][0]
            Results['Coef'] = CoefT[key]
            Results['perr'] = perrT[key]
            Results['R2'] = np.array(R2T)[x][0]
            Results['Functionkey'] = key
            Results['Function'] = self.Adj[key]
            # NOTE(review): here FunctionEq is the raw template, while the
            # single-fit branch stores a formatted string -- confirm the
            # inconsistency is intended.
            Results['FunctionEq'] = self.AdjF[key]
        else:
            # Fitting
            Coef, pcov = curve_fit(fun, X, Y)
            ss_res = np.dot((Y - fun(X, *Coef)), (Y - fun(X, *Coef)))
            perr = np.sqrt(np.diag(pcov))
            ymean = np.mean(Y)
            ss_tot = np.dot((Y - ymean), (Y - ymean))
            R2 = 1 - (ss_res / ss_tot)
            Results['Coef'] = Coef
            Results['perr'] = perr
            Results['R2'] = R2
            Results['Functionkey'] = key
            Results['Function'] = fun
            Results['FunctionEq'] = self.FunctionsEqstr(self.AdjF[key], Coef)

        # Confidence intervals of the parameters
        n = len(Y)  # Number of data
        p = len(Results['Coef'])  # Number of parameters
        dof = max(0, n - p)  # Number of degrees of freedom
        # Student-t value for the dof and confidence level
        tval = t.ppf(1.0 - alpha / 2.0, dof)

        # NOTE(review): in best-fit mode pcov is whatever the LAST function
        # in the loop produced, which need not belong to the chosen best
        # fit -- confirm. The loop variable p also shadows the parameter
        # count computed above.
        Results['ConInt'] = []
        for i, p, var in zip(range(n), Results['Coef'], np.diag(pcov)):
            sigma = var**0.5
            Results['ConInt'].append([p - sigma * tval, p + sigma * tval])

        # ---------------------
        # Errors
        # ---------------------
        # Estimation Error
        SS = []
        VC = Results['Function'](X, *Results['Coef'])
        for iy, y in enumerate(VC):
            SS.append((Y[iy] - y)**2)
        EErr = np.sqrt((1 / (n - 2)) * np.sum(np.array(SS)))
        Results['EErr'] = EErr
        # RSME (Root Mean Square Error)
        SS = []
        VC = Results['Function'](X, *Results['Coef'])
        for iy, y in enumerate(VC):
            SS.append((y - Y[iy])**2)
        RSME = np.sqrt((1 / (n)) * np.sum(np.array(SS)))
        Results['RSME'] = RSME
        # MBE (Mean Bias Error)
        # NOTE(review): np.sqrt over a sum of SIGNED errors can receive a
        # negative value and yield NaN; a conventional MBE is the plain
        # mean without the square root -- confirm before relying on it.
        SS = []
        VC = Results['Function'](X, *Results['Coef'])
        for iy, y in enumerate(VC):
            SS.append((y - Y[iy]))
        RSME = np.sqrt((1 / (n)) * np.sum(np.array(SS)))
        Results['MBE'] = RSME
        # MPE (Mean Percentage Error)
        # NOTE(review): same sqrt-of-signed-sum concern as MBE above; also
        # divides by Y[iy], so a zero observation produces inf/NaN.
        SS = []
        VC = Results['Function'](X, *Results['Coef'])
        for iy, y in enumerate(VC):
            SS.append(((y - Y[iy]) / Y[iy]) * 100)
        RSME = np.sqrt((1 / (n)) * np.sum(np.array(SS)))
        Results['MPE'] = RSME
        return Results
Ejemplo n.º 23
0
    def LoadData(self, Station=None, flagComplete=True, dt=5):
        '''
        DESCRIPTION:
            This function loads the data of a station and compiles 
            it in a dictionary.
        ___________________________________________________________________
        INPUT:
            :param Station:      A str, List with the stations that would
                                    be extracted.
            :param flagComplete: A boolean, flag to determine if
                                            completes the data
            :param dt:           An int, resample step in minutes for the
                                 intermediate aggregation (default 5).
        '''
        self.flagComplete = flagComplete
        self.Station = Station
        # -------------------------
        # Error Managment
        # -------------------------
        if not (isinstance(Station, list)) and Station != None and not (
                isinstance(Station, str)):
            Er = utl.ShowError('OpenWundergrounds', 'LoadData',
                               'Erroneus type for parameter Station')
            raise TypeError
        # -------------------------
        # Stations
        # -------------------------
        # Default to the first known station; a list collapses to its
        # first element (only one station is processed per call).
        if Station == None:
            Station = self.Stations[0]
        elif isinstance(Station, list):
            Station = Station[0]
        # -------------------------
        # Parameters
        # -------------------------
        # Descriptor consumed by EMSD.Open_Data; colStr/colData are filled
        # below once the file header has been inspected.
        DataBase = {
            'DataBaseType': 'txt',
            'deli': self.deli,
            'colStr': None,
            'colData': None,
            'row_skip': 1,
            'flagHeader': True,
            'rowH': 0,
            'row_end': 0,
            'str_NaN': '',
            'num_NaN': None,
            'dtypeData': float
        }
        # Suffixes of the hourly and daily aggregate series produced below.
        LabelsH = ['H', 'maxH', 'minH']
        LabelsD = ['D', 'NF', 'NNF', 'maxD', 'M', 'minM', 'maxM', 'minD']

        self.LabelsH = LabelsH
        self.LabelsD = LabelsD

        # -------------------------
        # Verify Headers
        # -------------------------
        # Headers: first row of the station's first file.
        Headers = np.genfromtxt(self.Arch[Station][0],
                                dtype=str,
                                skip_header=0,
                                delimiter=self.deli,
                                max_rows=1)
        # Verify colStr data: map each wanted string label to its column
        # index; labels missing from the file are collected separately.
        colStr = []
        LabStrNo = []
        LabStrYes = []
        for lab in self.LabelsStr:
            x = np.where(Headers == lab)[0]
            if len(x) > 0:
                colStr.append(x[0])
                LabStrYes.append(lab)
            else:
                LabStrNo.append(lab)
        DataBase['colStr'] = tuple(colStr)
        # Verify colData data: same mapping for the numeric labels.
        colData = []
        LabDataNo = []
        LabDataYes = []
        for lab in self.LabelsData:
            x = np.where(Headers == lab)[0]
            if len(x) > 0:
                colData.append(x[0])
                LabDataYes.append(lab)
            else:
                LabDataNo.append(lab)
        # NOTE(review): LabStrYes is assigned the DATA labels here -- this
        # looks like a naming slip; confirm against users of LabStrYes.
        self.LabStrYes = LabDataYes
        DataBase['colData'] = tuple(colData)

        # -------------------------
        # Extract information
        # -------------------------
        EM = EMSD()

        # Read every file of the station and stack the columns; files that
        # fail to parse are reported and skipped. Data columns absent from
        # a later file are padded with NaN to keep lengths aligned.
        for iar, ar in enumerate(self.Arch[Station]):
            try:
                R = EM.Open_Data(ar, DataBase=DataBase)
            except ValueError:
                print('Error document data:', ar)
                continue
            if iar == 0:
                Data = R
            else:
                for iLab, Lab in enumerate(LabStrYes):
                    Data[Lab] = np.hstack((Data[Lab], R[Lab]))
                for iLab, Lab in enumerate(LabDataYes):
                    try:
                        Data[Lab] = np.hstack((Data[Lab], R[Lab]))
                    except KeyError:
                        Data[Lab] = np.hstack(
                            (Data[Lab],
                             np.empty(R[LabDataYes[0]].shape) * np.nan))

        # for Lab in LabStrNo:
        #     R[Lab] = np.array(['nan' for i in len(R['Time'])])
        # for Lab in LabDataNo:
        #     R[Lab] = np.array([np.nan for i in len(R['Time'])])

        # DatesS = [i[:16] for i in Data['Time']]
        DatesS = Data['Time']
        Dates = DUtil.Dates_str2datetime(DatesS,
                                         Date_Format='%Y-%m-%d %H:%M:%S')
        Data.pop('Time', None)

        # -------------------------
        # Data Completion
        # -------------------------
        LabelsHmat = []
        LabelsDmat = []
        # Data in years
        DataC = dict()
        DataCC = dict()
        # Fill the data containers
        DataH = dict()  # Hourly data
        DataD = dict()  # Daily data
        self.DatesC = dict()
        self.DatesD = dict()  # Daily dates
        for iLab, Lab in enumerate(LabDataYes):
            # Complete the series to a 1-minute grid, filling gaps with NaN.
            VC = DMan.CompD(Dates, Data[Lab], dtm=timedelta(0, 60))
            # Precipitation corrected
            # NOTE(review): *5/60 presumably converts an hourly precip
            # rate into a 5-minute accumulation -- confirm the units.
            if Lab == 'HourlyPrecipMM':
                VC['VC'] = VC['VC'] * 5 / 60
            if iLab == 0:
                DatesC = VC['DatesC']
            DataC[self.LabDataSave[Lab]] = VC['VC']
            # Resample the information to every dt (default 5) minutes
            DatesCC, DatesCN, DataCC[self.LabDataSave[Lab]] = DMan.Ca_E(
                DatesC,
                DataC[self.LabDataSave[Lab]],
                dt=dt,
                escala=-1,
                op=self.LabDataOper[Lab],
                flagNaN=False)
            # Data Eliminations: clip values outside the configured
            # physical bounds to NaN.
            if self.ElimOver[self.LabDataSave[Lab]] != None:
                DataCC[self.LabDataSave[Lab]][
                    DataCC[self.LabDataSave[Lab]] > self.ElimOver[
                        self.LabDataSave[Lab]]] = np.nan
            if self.ElimLow[self.LabDataSave[Lab]] != None:
                DataCC[self.LabDataSave[Lab]][
                    DataCC[self.LabDataSave[Lab]] < self.ElimLow[
                        self.LabDataSave[Lab]]] = np.nan

            # Convert the data to hourly/daily aggregates
            DatesC2, DatesNC2, VC2 = EM.Ca_EC(Date=DatesCC,
                                              V1=DataCC[self.LabDataSave[Lab]],
                                              op=self.LabDataOper1[Lab],
                                              key=None,
                                              dtm=dt,
                                              op2=self.LabDataOper2[Lab],
                                              op3=self.LabDataOper3[Lab])

            for LabH in LabelsH:
                DataH[self.LabDataSave[Lab] + LabH] = VC2[LabH]
                LabelsHmat.append(self.LabDataSave[Lab] + LabH)
            for LabD in LabelsD:
                DataD[self.LabDataSave[Lab] + LabD] = VC2[LabD]
                LabelsDmat.append(self.LabDataSave[Lab] + LabD)

        # NOTE(review): DatesC2/DatesNC2/DatesCC/DatesCN below come from
        # the LAST loop iteration; assumes every variable shares the same
        # date grid -- confirm.
        self.DataH = DataH
        self.DataD = DataD
        self.DatesH = DatesC2['DateH']
        self.DatesD['DatesD'] = DatesC2['DateD']
        self.DatesD['DatesM'] = DatesC2['DateM']
        self.DataCC = DataCC
        self.DatesCC = DatesCC
        self.DatesCN = DatesCN
        self.DatesNC2 = DatesNC2['DateMN']
        self.LabelsHmat = LabelsHmat
        self.LabelsDmat = LabelsDmat
        return