Code example #1
File: Medellin.py Project: DGD042/Libraries
    def LoadRadar(self):
        '''
        DESCRIPTION:
            This method loads the radar information.
        _________________________________________________________________
        INPUT:
        '''
        PathRadar = '/Users/DGD042/Documents/Est_Information/SIATA/Radar/01_nc/PPIVol/'
        PathRadarRHI = '/Users/DGD042/Documents/Est_Information/SIATA/Radar/01_nc/RHIVol/'
        # ---------------------
        # Radar Files
        # ---------------------
        self.DateRI = self.DateI+timedelta(0,5*60*60)
        self.DateRE = self.DateE+timedelta(0,5*60*60)
        DateR = '%04i%02i/'%(self.DateRI.year,self.DateRE.month)
        self.PathRadar = PathRadar+DateR
        Files = gl.glob(PathRadar+DateR+'*.gz')
        if len(Files) == 0:
            self.ShowError('LoadRadar','Medellin','No Radar PPIVol Files Found')
        self.Files = Files
        # Date (YYYYMMDD) and time (HHMM) taken from fixed positions in the file name
        self.RadarDates = np.array([i[-32:-32+8]+i[-32+9:-32+8+5] for i in Files])
        self.RadarDatesP = DUtil.Dates_str2datetime(self.RadarDates,Date_Format='%Y%m%d%H%M')
        # ---------------------
        # Radar Files RHI
        # ---------------------
        if self.flagRHI:
            self.DateRIRHI = self.DateI+timedelta(0,5*60*60)
            self.DateRERHI = self.DateE+timedelta(0,5*60*60)
            DateR = '%04i%02i/'%(self.DateRIRHI.year,self.DateRERHI.month)
            self.PathRadarRHI = PathRadarRHI+DateR
            Files = gl.glob(PathRadarRHI+DateR+'*.gz')
            if len(Files) == 0:
                self.ShowError('LoadRadar','Medellin','No Radar RHIVol Files Found')
            self.FilesRHI = Files
            self.RadarRHIDates = np.array([i[-32:-32+8]+i[-32+9:-32+8+5] for i in Files])
            self.RadarRHIDatesP = DUtil.Dates_str2datetime(self.RadarRHIDates,Date_Format='%Y%m%d%H%M')
        # ---------------------
        # Dates
        # ---------------------
        xi = np.where(self.RadarDatesP == self.DateRI)[0]
        offset = 0
        while len(xi) == 0:
            # Step forward one minute at a time until a radar timestamp matches
            offset += 60
            xi = np.where(self.RadarDatesP == self.DateRI+timedelta(0,offset))[0]
        xf = np.where(self.RadarDatesP == self.DateRE)[0]
        offset = 0
        while len(xf) == 0:
            # Step backward one minute at a time until a radar timestamp matches
            offset += 60
            xf = np.where(self.RadarDatesP == self.DateRE-timedelta(0,offset))[0]
        xi = int(xi[0])
        xf = int(xf[0])

        self.ArchRadar = self.Files[xi:xf+1]
        self.RadarDates = self.RadarDates[xi:xf+1]
        self.RadarDatesP = DUtil.Dates_str2datetime(self.RadarDates,Date_Format='%Y%m%d%H%M')
        
        if self.flagRHI:
            self.ArchRadarRHI = self.FilesRHI[xi:xf+1]
            self.RadarRHIDates = self.RadarRHIDates[xi:xf+1]
            self.RadarRHIDatesP = DUtil.Dates_str2datetime(self.RadarRHIDates,Date_Format='%Y%m%d%H%M')
        return
Code example #2
File: DatesC.py Project: DGD042/Libraries
    def __init__(self,Dates,Date_Format=None,flagQuick=False):
        '''
        DESCRIPTION:
            This class takes date data and stores it both as a datetime
            vector and as a string vector, keeping both representations
            packed in one object.
        '''
        # -----------------
        # Error managment
        # -----------------
        assert isinstance(Dates[0],str) or isinstance(Dates[0],datetime) or isinstance(Dates[0],date)

        if isinstance(Dates[0],str):
            self.datetime = DUtil.Dates_str2datetime(Dates,Date_Format=Date_Format,flagQuick=flagQuick)
            self.str = DUtil.Dates_datetime2str(self.datetime,Date_Format=None)

        if isinstance(Dates[0],datetime) or isinstance(Dates[0],date):
            self.datetime = Dates
            self.str = DUtil.Dates_datetime2str(Dates)

        return
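A minimal usage sketch for this container; the import path and the Date_Format value are assumptions, not taken from the repository, and the .datetime/.str attributes follow the constructor above.

from datetime import datetime
from DatesC import DatesC  # assumed import path

# Built from strings: both representations become available.
D = DatesC(['2012/01/01 0000', '2012/01/01 0005'], Date_Format='%Y/%m/%d %H%M')
print(D.datetime[0], D.str[0])

# Built directly from datetime objects.
D2 = DatesC([datetime(2012, 1, 1, 0, 0), datetime(2012, 1, 1, 0, 5)])
print(D2.str[0])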
Code example #3
File: Medellin.py Project: DGD042/Libraries
 def __init__(self,DateI,DateE,endingmat='',Var='',flagRHI=False):
     '''
     DESCRIPTION:
         This method initializes the object with the event window,
         builds a minute-by-minute date vector between DateI and DateE
         and loads the station information.
     '''
     # ----------------
     # Error Managment
     # ----------------
     if not(isinstance(endingmat,str)):
         self.ShowError('__init__','Medellin','endingmat must be a string')
     # ----------------
     # Paths
     # ----------------
     # ----------------
     # Constants
     # ----------------
     self.ImgFolder = {'Medellin':'Medellin/Cases'+endingmat+'/'}
     Labels = ['ID','Name','Latitud','Longitud']
     self.DateI = DateI
     self.DateE = DateE
     lenData = int((self.DateE-self.DateI).total_seconds()/60)  # total minutes in the event window
     self.DatesSt = [DateI]
     for iEv in range(lenData):
         self.DatesSt.append(self.DatesSt[-1]+timedelta(0,60))
     self.DatesSt = np.array(self.DatesSt)
     DatesStr = DUtil.Dates_datetime2str([DateI],Date_Format='%Y%m')[0]
     self.PathImg = 'Tesis_MscR/02_Docs/01_Tesis_Doc/Kap5/Img/Medellin/Cases_'+Var+'/'+DatesStr+'/'
     self.VarA = Var
     self.flagRHI = flagRHI
     # ----------------
     # Load Information
     # ----------------
     self.SC = BPL.Scatter_Gen(DataBase='Medellin',
             endingmat=endingmat,PathImg=self.PathImg)
     # ---------------------
     # Station information
     # ---------------------
     self.ID = self.SC.ID
     self.St_Info = {}
     for iSt,St in enumerate(self.ID):
         self.St_Info[St] = {}
         for Lab in Labels:
             self.St_Info[St][Lab] = self.SC.StInfo['Medellin'][Lab][iSt]
         self.St_Info[St]['CodesNames'] = self.SC.StInfo['Medellin']['ID'][iSt]+ ' ' + self.SC.StInfo['Medellin']['Name'][iSt]
     return
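A hedged instantiation sketch for this constructor; the date values and the Var label are placeholders, and the data paths hard-coded inside the class are assumed to exist on the machine running it.

from datetime import datetime

# Hypothetical event window; argument values are illustrative only.
Event = Medellin(DateI=datetime(2015, 9, 15, 18, 0),
                 DateE=datetime(2015, 9, 15, 23, 0),
                 endingmat='', Var='Prec', flagRHI=False)
# Event.DatesSt holds one datetime per minute between DateI and DateE.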
Code example #4
def CompD(Dates,V,dtm=None):
    '''
    DESCRIPTION:
    
        This function takes a data series and fills the missing dates with
        NaN values. It fills the complete years spanned by the data.
    _______________________________________________________________________

    INPUT:
        :param Dates: A list or ndarray, Data dates; it must be a string,
                                         date or datetime vector.
        :param V:     A list or ndarray, Variable to be filled.
        :param dtm:   A timedelta, Time step of the complete series;
                                   if None, the timedelta between the
                                   first two values of the original data
                                   is used.
    _______________________________________________________________________
    
    OUTPUT:
        :return Results: A dict with the following entries.
            DatesC: Complete date string vector.
            VC:     Filled data values.
            DatesN: Complete date Python datetime vector.
    '''
    V = np.array(V)
    Dates = np.array(Dates)
    # ---------------------
    # Error managment
    # ---------------------

    if not isinstance(Dates[0],(str,date,datetime)):
        utl.ShowError('CompD','EDSM','not expected format in dates')
    if len(Dates) != len(V):
        utl.ShowError('CompD','EDSM','Date and V are different length')
    if dtm is not None and not isinstance(dtm,timedelta):
        utl.ShowError('CompD','EDSM','Bad dtm format')

    # Remove spurious February 29/30 entries in non-leap years
    if isinstance(Dates[0],str):
        Dates2 = np.array([i[:10] for i in Dates])
        for Y in range(int(Dates2[0][:4]),int(Dates2[-1][:4])+1):
            Fi = date(Y,2,1)
            Ff = date(Y,3,1)
            Dif = (Ff-Fi).days
            if Dif == 28:
                x = np.where(Dates2 == '%s/02/29' %(Y))
                Dates = np.delete(Dates,x)
                V = np.delete(V,x)
                Dates2 = np.delete(Dates2,x)
            x = np.where(Dates2 == '%s/02/30' %(Y))
            Dates = np.delete(Dates,x)
            V = np.delete(V,x)
            Dates2 = np.delete(Dates2,x)

    # ---------------------
    # Dates Calculations
    # ---------------------
    # Original Dates
    if isinstance(Dates[0],str):
        DatesO = DUtil.Dates_str2datetime(Dates)
    else:
        DatesO = Dates
    if dtm is None:
        dtm = DatesO[1]-DatesO[0]
    # Complete Dates
    if isinstance(DatesO[0],datetime):
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
        DatesN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
    else:
        DateI = date(DatesO[0].year,1,1)
        DateE = date(DatesO[-1].year,12,31)
        DatesN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
    # Filled data
    VC = np.empty(len(DatesN))*np.nan
    DatesN = np.array(DatesN)
    DatesO = np.array(DatesO)
    V = np.array(V)
    # x = DatesN.searchsorted(DatesO)
    x = np.searchsorted(DatesN,DatesO) 

    try:
        VC[x] = V
    except ValueError:
        VC = np.array(['' for i in range(len(DatesN))]).astype('<U20')
        VC[x] = V
    
    DatesC = DUtil.Dates_datetime2str(DatesN)

    Results = {'DatesC':DatesC,'DatesN':DatesN,'VC':VC}
    return Results
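A short sketch of how CompD is expected to behave on a daily series with one gap; the date-string format follows the 'Year/month/day' convention used elsewhere in the library, and passing dtm explicitly avoids relying on the first two samples.

import numpy as np
from datetime import timedelta

# Hypothetical daily series with 2016/01/03 missing.
Dates = ['2016/01/01', '2016/01/02', '2016/01/04']
V = [1.0, 2.0, 4.0]

R = CompD(Dates, V, dtm=timedelta(days=1))
# R['DatesN'] spans the whole of 2016; R['VC'] is NaN wherever there is
# no observation, so the non-NaN sum equals the original total.
print(len(R['DatesN']), np.nansum(R['VC']))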
Code example #5
def Ca_E(FechaC,V1C,dt=24,escala=1,op='mean',flagMa=False,flagDF=False,flagNaN=True):
    '''
    DESCRIPTION:
    
        This function changes the temporal scale of the data, aggregating
        it to different time scales; complete time series must be
        provided.

        Missing data must be given as NaN.
    _______________________________________________________________________

    INPUT:
        + FechaC: Dates of the data organized as 'year/month/day - HHMM';
                  the '/' can be replaced by any other character.
                  It must be a string vector and must span whole years.
        + V1C: Variable whose temporal scale is to be changed.
        + dt: Time delta used to perform the aggregation; it depends on
              the nature of the data.
              If monthly data are needed, dt must be 1.
        + escala: Scale to which the data are to be converted:
                -1: minute scale.
                0: hourly.
                1: daily.
                2: monthly; the data must first be taken to the daily scale.
        + op: Operation to carry out; for now it only supports:
              'mean': to obtain the average.
              'sum': to obtain the sum.
        + flagMa: Whether the maximum and minimum values are wanted.
                True: to compute them.
                False: to skip them.
        + flagDF: Whether the missing-data counts per month are wanted;
                  it only works when the data are given daily.
                True: to compute them.
                False: to skip them.
        + flagNaN: Flag to know if the user wants to include the calculations with low data.
    _______________________________________________________________________
    
    OUTPUT:
        - FechaEs: New scaled dates as strings.
        - FechaNN: New scaled dates as a date vector.
        - VE: Scaled variable.
        - VEMax: Vector of maxima.
        - VEMin: Vector of minima.
    '''
    # Warnings are disabled in this code so that it runs faster; the
    # warnings being raised came from taking means of all-NaN slices,
    # not from a problem in the calculations.
    warnings.filterwarnings('ignore')

    if escala > 2:
        utl.ShowError('EMSD','Ca_E','Todavía no se han programado estas escalas')

    # -------------------------------------------
    # Variable initialization
    # -------------------------------------------
    # The variables that will be used are initialized
    FechaNN = ["" for k in range(1)]
    FechaEs = ["" for k in range(1)]
    VE = []
    VEMax = []
    VEMin = []

    NF = [] # Percentage of missing data
    NNF = [] # Percentage of non-missing data
    rr = 0

    Oper = {'sum':np.nansum,'mean':np.nanmean}

    # -------------------------------------------
    # Date vector
    # -------------------------------------------

    # The years are taken
    yeari = int(FechaC[0][0:4]) # Initial year
    yearf = int(FechaC[len(FechaC)-1][0:4]) # Final year
    Sep = FechaC[0][4] # Date separator
    if isinstance(FechaC[0],str):
        DatesO = DUtil.Dates_str2datetime(FechaC)
    else:
        DatesO = FechaC

    # The years are used to generate the FechaEs output
    if escala == -1:
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
        dtm = timedelta(0,dt*60)
        FechaNN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
        FechaEs = DUtil.Dates_datetime2str(FechaNN)
    elif escala == 0:
        DateI = datetime(DatesO[0].year,1,1,0,0)
        DateE = datetime(DatesO[-1].year,12,31,23,59)
        dtm = timedelta(0,60*60)
        FechaNN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
        FechaEs = DUtil.Dates_datetime2str(FechaNN)
    elif escala == 1: # For hourly or daily data
        for result in perdelta(date(int(yeari), 1, 1), date(int(yearf)+1, 1, 1), timedelta(days=1)):
            FR = result.strftime('%Y'+Sep+'%m'+Sep+'%d') # Date
            if escala == 0:
                for i in range(0,24):
                    if rr == 0:
                        FechaNN[0] = result
                        if i < 10:
                            FechaEs[rr] = FR + '-0' +str(i)+'00'
                        else:
                            FechaEs[rr] = FR + '-' +str(i)+'00'
                    else:
                        FechaNN.append(result)
                        if i < 10:
                            FechaEs.append(FR + '-0' +str(i)+'00')
                        else:
                            FechaEs.append(FR + '-' +str(i)+'00')
                    rr += 1 # Row counter is increased
            elif escala == 1:
                if rr == 0:
                    FechaNN[0] = result
                    FechaEs[rr] = FR
                else:
                    FechaNN.append(result)
                    FechaEs.append(FR)
                rr += 1
    if escala == 2:
        x = 0
        for i in range(int(yeari),int(yearf)+1):
            for j in range(1,13):
                if i == int(yeari) and j == 1:
                    FechaNN[0] = date(i,j,1)
                    FechaEs[0] = FechaNN[0].strftime('%Y'+Sep+'%m')
                else:
                    FechaNN.append(date(i,j,1))
                    FechaEs.append(FechaNN[x].strftime('%Y'+Sep+'%m'))
                x += 1
    # -------------------------------------------
    # Scaling calculation
    # -------------------------------------------
    dtt = 0 # Difference counter
    if op == 'mean':
        if escala == 0 or escala == -1 or escala == 1: 
            # Loop to aggregate the data
            for i in range(0,len(V1C),dt): 
                dtt = dtt + dt # The row counter is increased
                q = np.isnan(V1C[i:dtt])
                qq = sum(q)
                qYes = sum(~np.isnan(V1C[i:dtt]))
                if (qq > dt*0.30 and flagNaN) or qYes == 0:
                    VE.append(np.nan)
                    if flagMa == True:
                        VEMax.append(np.nan)
                        VEMin.append(np.nan)
                else:
                    try:
                        VE.append(float(np.nanmean(V1C[i:dtt])))
                    except ValueError:
                        VE.append(np.nan)
                    if flagMa == True:
                        try:
                            VEMax.append(float(np.nanmax(V1C[i:dtt])))
                        except ValueError:
                            VEMax.append(np.nan)
                        try:
                            VEMin.append(float(np.nanmin(V1C[i:dtt])))
                        except ValueError:
                            VEMin.append(np.nan)

    elif op == 'sum':
        if escala == 0 or escala == -1 or escala == 1: 
            # Loop to aggregate the data
            for i in range(0,len(V1C),dt): 
                dtt = dtt + dt # The row counter is increased
                q = np.isnan(V1C[i:dtt])
                qq = sum(q)
                qYes = sum(~np.isnan(V1C[i:dtt]))
                if (qq > dt*0.30 and flagNaN) or qYes == 0:
                    VE.append(np.nan)
                    if flagMa == True:
                        VEMax.append(np.nan)
                        VEMin.append(np.nan)
                else:
                    try:
                        VE.append(float(np.nansum(V1C[i:dtt])))
                    except ValueError:
                        VE.append(np.nan)
                    if flagMa == True:
                        try:
                            VEMax.append(float(np.nanmax(V1C[i:dtt])))
                        except ValueError:
                            VEMax.append(np.nan)
                        try:
                            VEMin.append(float(np.nanmin(V1C[i:dtt])))
                        except ValueError:
                            VEMin.append(np.nan)

    if escala == 2:
        YearMonthData = np.array([str(i.year)+'/'+str(i.month) for i in DatesO])
        YearMonth = np.array([str(date(i,j,1).year)+'/'+str(date(i,j,1).month) for i in range(int(yeari),int(yearf)+1) for j in range(1,13)])
        VE = np.empty(YearMonth.shape)*np.nan
        VEMax = np.empty(YearMonth.shape)*np.nan
        VEMin = np.empty(YearMonth.shape)*np.nan

        NF = np.empty(YearMonth.shape)*np.nan
        NNF = np.empty(YearMonth.shape)*np.nan

        for iYM, YM in enumerate(YearMonth):  
            x = np.where(YearMonthData == YM)[0]
            if len(x) != 0:
                q = sum(~np.isnan(V1C[x]))
                NF[iYM] = (q/len(x))
                NNF[iYM] = (1-NF[iYM])
                if q >= round(len(x)*0.7,0) and flagNaN:
                    VE[iYM] = Oper[op](V1C[x])
                    VEMax[iYM] = np.nanmax(V1C[x])
                    VEMin[iYM] = np.nanmin(V1C[x])

    # -------------------------------------------
    # Results are returned
    # -------------------------------------------
    if flagMa == True:
        if  flagDF:
            return np.array(FechaEs), np.array(FechaNN), np.array(VE), np.array(VEMax), np.array(VEMin), np.array(NF),np.array(NNF)
        else:
            return np.array(FechaEs), np.array(FechaNN), np.array(VE), np.array(VEMax), np.array(VEMin)
    elif flagMa == False:
        if flagDF:
            return np.array(FechaEs), np.array(FechaNN), np.array(VE),np.array(NF),np.array(NNF)
        else:
            return np.array(FechaEs), np.array(FechaNN), np.array(VE)
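A usage sketch for Ca_E, under the assumption that FechaC is a complete 5-minute date-string vector covering whole years and Prec5 the matching precipitation series with NaN gaps (both are placeholders); with 5-minute input, dt=12 groups twelve samples into each hourly value (escala=0).

# Hypothetical aggregation from 5-minute to hourly totals.
FechaEs, FechaNN, PrecH = Ca_E(FechaC, Prec5, dt=12, escala=0, op='sum')
# FechaEs: hourly date strings, FechaNN: hourly datetimes,
# PrecH: hourly sums (NaN where more than 30 % of the hour is missing).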
Code example #6
def CompDC(Dates,V,DateI,DateE,dtm=None):
    '''
    DESCRIPTION:
    
        This function completes or cuts data between specific dates.
    _____________________________________________________________________

    INPUT:
        :param Dates: Data dates; they must be strings like
                      'Year/month/day' (the separator '/' can be
                      replaced with any character),
                      or a date or datetime vector.
        :param V:     Variable. 
        :param DateI: Initial date in date or datetime format.
        :param DateE: Final date in date or datetime format.
        :param dtm:   Time delta for the full data; if None, the
                      timedelta between the first two values of the
                      original data is used.
    _____________________________________________________________________
    
    OUTPUT:
        :return Results: A dict, Dictionary with the following results.
            DatesC: Complete date string vector.
            VC:     Filled data values.
            DatesN: Complete date Python datetime vector.
    '''
    
    V = np.array(V)
    Dates = np.array(Dates)
    # ---------------------
    # Error managment
    # ---------------------

    if not isinstance(Dates[0],(str,date,datetime)):
        Er = utl.ShowError('CompD','EDSM','Bad format in dates')
        raise Er
    if len(Dates) != len(V):
        Er = utl.ShowError('CompD','EDSM','Date and V are different length')
        raise Er
    if dtm is not None and not isinstance(dtm,timedelta):
        Er = utl.ShowError('CompD','EDSM','Bad dtm format')
        raise Er

    # ---------------------
    # Dates Calculations
    # ---------------------
    # Original Dates
    if isinstance(Dates[0],str):
        DatesO = DUtil.Dates_str2datetime(Dates)
    else:
        DatesO = Dates

    if dtm is None:
        dtm = DatesO[1]-DatesO[0]
    DatesN = DUtil.Dates_Comp(DateI,DateE,dtm=dtm)
    
    # -------------------------------------------
    # Data Extraction
    # -------------------------------------------

    # Filled data
    if isinstance(V[0],str):
        VC = (np.empty(len(DatesN))*np.nan).astype(str)
    else:
        VC = (np.empty(len(DatesN))*np.nan)
    
    DatesN = np.array(DatesN)
    DatesO = np.array(DatesO)
    for iF,F in enumerate(DatesO):
        x = np.where(DatesN == F)[0]
        if len(x) == 1:
            VC[x] = V[iF]
        elif len(x) > 1:
            VC[x[0]] = V[iF]
    
    DatesC = DUtil.Dates_datetime2str(DatesN)

    Results = {'DatesC':DatesC,'DatesN':DatesN,'VC':VC}
    return Results
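A sketch of CompDC cutting and padding a series to a fixed window, assuming DUtil.Dates_str2datetime returns values comparable with those produced by DUtil.Dates_Comp.

from datetime import date, timedelta

# Hypothetical daily series; only two of the five days have data.
Dates = ['2016/03/02', '2016/03/04']
V = [10.0, 30.0]

R = CompDC(Dates, V, DateI=date(2016, 3, 1), DateE=date(2016, 3, 5),
           dtm=timedelta(days=1))
# R['DatesN'] runs from 2016/03/01 to 2016/03/05; R['VC'] is NaN on the
# days without observations.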
Code example #7
File: Medellin.py Project: DGD042/Libraries
    def EventsSeriesGen(self,ax,DatesEv,Data,DataV,DataKeyV,DataKey=None,
            PathImg='',Name='',NameArch='',
            GraphInfo={'ylabel':['Precipitación [mm]'],'color':['b'],'label':['Precipitación']},
            GraphInfoV={'color':['-.b'],'label':['Inicio del Evento']},
            flagBig=False,vm={'vmax':[],'vmin':[]},Ev=0,flagV=True,
            flagAverage=False,dt=1,Date='',flagEvent=False):
        '''
        DESCRIPTION:

            This function plots the different events, only for the
            precipitation events.
        _______________________________________________________________________
        INPUT:
            :param DatesEv:     A ndarray, Dates of the events.
            :param Data:        A dict, Dictionary with the variables
                                        that will be plotted.
            :param DataV:       A dict, Dictionary with the vertical lines;
                                        these must be dates in datetime
                                        format.
            :param DataKeyV:    A list, List with the keys of the vertical values.

        '''
        # The dates are arranged
        if flagAverage:
            H = int(len(DatesEv)/2)
            FechaEvv = np.arange(-H,H,1)
            FechaEvv = FechaEvv*dt/60
        else:
            if not(isinstance(DatesEv[0],datetime)):
                FechaEvv = DUtil.Dates_str2datetime(DatesEv)
            else:
                FechaEvv = DatesEv

        if DataKey is None:
            flagSeveral = False
        elif len(DataKey) >= 1:
            flagSeveral = True

        if len(vm['vmax']) == 0 and len(vm['vmin']) == 0:
            flagVm = False
        else:
            flagVm = True

        if flagVm:
            if len(vm['vmax']) >= 1 and len(vm['vmin']) >= 1:
                flagVmax = True
                flagVmin = True
            elif len(vm['vmax']) >= 1:
                flagVmax = True
                flagVmin = False
            elif len(vm['vmin']) >= 1:
                flagVmax = False
                flagVmin = True

        # -------------------------
        # The events are plotted
        # -------------------------

        # The two series are plotted
        # fH=30 # Figure length
        # fV = fH*(2/3) # Figure width


        lensize=17
        plt.rcParams.update({'font.size': 15,'font.family': 'sans-serif'\
            ,'font.sans-serif': 'Arial Narrow'\
            ,'xtick.labelsize': 13,'xtick.major.size': 6,'xtick.minor.size': 4\
            ,'xtick.major.width': 1,'xtick.minor.width': 1\
            ,'ytick.labelsize': 15,'ytick.major.size': 12,'ytick.minor.size': 4\
            ,'ytick.major.width': 1,'ytick.minor.width': 1\
            ,'axes.linewidth':1\
            ,'grid.alpha':0.1,'grid.linestyle':'-'})

        # plt.tick_params(
        #     axis='x',          # changes apply to the x-axis
        #     which='both',      # both major and minor ticks are affected
        #     bottom='off',      # ticks along the bottom edge are off
        #     top='off',         # ticks along the top edge are off
        #     labelbottom='off') 

        plt.xticks(rotation=45)
        # f = plt.figure(figsize=DM.cm2inch(fH,fV))
        # ax = host_subplot(111, axes_class=AA.Axes)
        ax.tick_params(axis='x',which='both',bottom='on',top='off',\
            labelbottom='off',direction='out')
        ax.tick_params(axis='y',which='both',left='on',right='off',\
            labelleft='on')
        ax.tick_params(axis='y',which='major',direction='inout') 
        if flagSeveral:
            if len(DataKey) >= 1:
                DataP = Data[DataKey[0]]
        else:
            DataP = Data

        # The main plot is drawn
        ax.plot(FechaEvv,DataP,color=GraphInfo['color'][0],label=GraphInfo['label'][0])
        if not(flagAverage):
            ax.axis["bottom"].major_ticklabels.set_rotation(30)
            ax.axis["bottom"].major_ticklabels.set_ha("right")
            ax.axis["bottom"].label.set_pad(30)
            ax.axis["bottom"].format_xdata = mdates.DateFormatter('%H%M')
            # ax.axis["bottom"].set_visible(False)
            # ax.axes().get_xaxis().set_visible(False)
        ax.axis["left"].label.set_color(color=GraphInfo['color'][0])
        ax.set_ylabel(GraphInfo['ylabel'][0])

        # The axis is scaled
        if flagVm:
            if (flagVmax and flagVmin) and (not(vm['vmin'][0] is None) and not(vm['vmax'][0] is None)):
                ax.set_ylim([vm['vmin'][0],vm['vmax'][0]])
            elif flagVmax and not(vm['vmax'][0] is None):
                ax.set_ylim(ymax=vm['vmax'][0])
            elif flagVmin and not(vm['vmin'][0] is None):
                ax.set_ylim(ymin=vm['vmin'][0])

        yTL = ax.yaxis.get_ticklocs() # List of Ticks in y

        # The vertical lines are plotted
        if flagV:
            for ilab,lab in enumerate(DataKeyV):
                ax.plot([DataV[lab][0],DataV[lab][0]],[yTL[0],yTL[-1]],
                        GraphInfoV['color'][ilab],label=GraphInfoV['label'][ilab])

        # The axes are arranged
        MyL = (yTL[1]-yTL[0])/5 # minorLocatory value
        minorLocatory = MultipleLocator(MyL)
        ax.yaxis.set_minor_locator(minorLocatory)

        # The remaining plots are drawn
        if flagSeveral:
            axi = [ax.twinx() for i in range(len(DataKey)-1)]
            for ilab,lab in enumerate(DataKey):
                if ilab >= 1:
                    axi[ilab-1].plot(FechaEvv,Data[lab],color=GraphInfo['color'][ilab],
                            label=GraphInfo['label'][ilab])
                    axi[ilab-1].set_ylabel(GraphInfo['ylabel'][ilab])
                    if flagVm and len(vm['vmax']) > 1:
                        if (not(vm['vmin'][ilab] is None) and not(vm['vmax'][ilab] is None)):
                            axi[ilab-1].set_ylim([vm['vmin'][ilab],vm['vmax'][ilab]])
                        elif not(vm['vmax'][ilab] is None):
                            axi[ilab-1].set_ylim(ymax=vm['vmax'][ilab])
                        elif not(vm['vmin'][ilab] is None):
                            axi[ilab-1].set_ylim(ymin=vm['vmin'][ilab])

                    if ilab == 2:
                        offset = 60
                        new_fixed_axis = axi[ilab-1].get_grid_helper().new_fixed_axis
                        axi[ilab-1].axis["right"] = new_fixed_axis(loc="right",
                                                        axes=axi[ilab-1],
                                                        offset=(offset, 0))
                        axi[ilab-1].axis["right"].label.set_color(color=GraphInfo['color'][ilab])
                    elif ilab == 3:
                        # axi[ilab-1].spines['right'].set_position(('axes',-0.25))
                        offset = -60
                        new_fixed_axis = axi[ilab-1].get_grid_helper().new_fixed_axis
                        axi[ilab-1].axis["right"] = new_fixed_axis(loc="left",
                                                        axes=axi[ilab-1],
                                                        offset=(offset, 0))
                        axi[ilab-1].axis["right"].label.set_color(color=GraphInfo['color'][ilab])
                    else:
                        offset = 0
                        new_fixed_axis = axi[ilab-1].get_grid_helper().new_fixed_axis
                        axi[ilab-1].axis["right"] = new_fixed_axis(loc="right",
                                                        axes=axi[ilab-1],
                                                        offset=(offset, 0))
                        axi[ilab-1].axis["right"].label.set_color(color=GraphInfo['color'][ilab])

                    # The axes are arranged
                    yTL = axi[ilab-1].yaxis.get_ticklocs() # List of Ticks in y
                    MyL = (yTL[1]-yTL[0])/5 # minorLocatory value
                    minorLocatory = MultipleLocator(MyL)
                    axi[ilab-1].yaxis.set_minor_locator(minorLocatory)
                    axi[ilab-1].format_xdata = mdates.DateFormatter('%H%M')
        ax.set_title(Name)
        return
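Because the method indexes ax.axis["bottom"] and calls get_grid_helper() on the twin axes, the ax it receives is assumed to be an axisartist host axes (the commented-out host_subplot line above points the same way); a hedged setup sketch using the hypothetical Event object from the sketch after example #3 and placeholder event data.

import matplotlib.pyplot as plt
import mpl_toolkits.axisartist as AA
from mpl_toolkits.axes_grid1 import host_subplot

# Hypothetical call; DatesEv, Data and DataV are placeholders.
fig = plt.figure(figsize=(10, 6))
ax = host_subplot(111, axes_class=AA.Axes)
Event.EventsSeriesGen(ax, DatesEv, Data, DataV,
                      DataKeyV=['Inicio'], DataKey=['Prec'],
                      Name='Evento 2015-09-15')
plt.show()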
Code example #8
File: SIATA.py Project: DGD042/Libraries
    def LoadData(self,Station=None,flagComplete=True,dt=1):
        '''
        DESCRIPTION:
            This function loads the data of a station and compiles 
            it in a dictionary.
        ___________________________________________________________________
        INPUT:
            :param Station:      A str, station whose data will be
                                    extracted.
            :param flagComplete: A boolean, flag that determines whether
                                            the data is completed.
        '''
        self.flagComplete = flagComplete
        self.Station = Station
        # -------------------------
        # Error Managment
        # -------------------------
        # -------------------------
        # Stations
        # -------------------------
        # -------------------------
        # Parameters
        # -------------------------
        DataBase = {'DataBaseType':'txt','deli':self.deli,
                'colStr':None,'colData':None,'row_skip':1,'flagHeader':True,
                'rowH':0,'row_end':0,'str_NaN':'','num_NaN':None,
                'dtypeData':float} 
        LabelsH = ['H','maxH','minH']
        LabelsD = ['D','NF', 'NNF', 'maxD', 'M', 'minM', 'maxM', 'minD']

        self.LabelsH = LabelsH
        self.LabelsD = LabelsD

        # -------------------------
        # Verify Headers
        # -------------------------
        # Headers
        Headers = np.genfromtxt(self.Arch[Station][0],dtype=str,skip_header=0,delimiter=self.deli,max_rows=1)
        # Verify colStr data
        colStr = []
        LabStrNo = []
        LabStrYes = []
        for lab in self.LabelsStr:
            x = np.where(Headers == lab)[0]
            if len(x) > 0:
                colStr.append(x[0])
                LabStrYes.append(lab)
            else:
                LabStrNo.append(lab)
        DataBase['colStr'] = tuple(colStr)
        # Verify colData data
        colData = []
        LabDataNo = []
        LabDataYes = []
        for lab in self.LabelsData:
            x = np.where(Headers == lab)[0]
            if len(x) > 0:
                colData.append(x[0])
                LabDataYes.append(lab)
            else:
                LabDataNo.append(lab)
        self.LabStrYes = LabDataYes
        DataBase['colData'] = tuple(colData)

        # -------------------------
        # Extract information
        # -------------------------
        EM = EMSD()

        for iar,ar in enumerate(self.Arch[Station]):
            try:
                R = EM.Open_Data(ar,DataBase=DataBase)
            except ValueError:
                print('Error document data:',ar)
                continue
            if iar == 0:
                Data = R
            else:
                for iLab,Lab in enumerate(LabStrYes):
                    Data[Lab] = np.hstack((Data[Lab],R[Lab]))
                for iLab,Lab in enumerate(LabDataYes):
                    try:
                        Data[Lab] = np.hstack((Data[Lab],R[Lab]))
                    except KeyError:
                        Data[Lab] = np.hstack((Data[Lab],np.empty(R[LabDataYes[0]].shape)*np.nan))
                
        # for Lab in LabStrNo:
        #     R[Lab] = np.array(['nan' for i in len(R['Time'])])
        # for Lab in LabDataNo:
        #     R[Lab] = np.array([np.nan for i in len(R['Time'])])

        # DatesS = [i[:16] for i in Data['Time']]
        DatesS = Data['Time']
        Dates = DUtil.Dates_str2datetime(DatesS,Date_Format='%Y-%m-%d %H:%M:%S')
        Data.pop('Time',None)

        # -------------------------
        # Data Completion
        # -------------------------
        LabelsHmat = []
        LabelsDmat = []
        # Data in years
        DataC = dict()
        DataCC = dict()
        # The data is filled in
        DataH = dict() # Hourly data
        DataD = dict() # Daily data
        self.DatesC = dict()
        self.DatesD = dict() # Daily dates
        for iLab,Lab in enumerate(LabDataYes):
            VC = DMan.CompD(Dates,Data[Lab],dtm=timedelta(0,60))
            # Precipitation corrected
            if Lab == 'HourlyPrecipMM':
                VC['VC'] = VC['VC']*5/60
            if iLab == 0:
                DatesC = VC['DatesC']
            DataC[self.LabDataSave[Lab]] = VC['VC']
            # The information is converted to 5-minute data
            DatesCC,DatesCN,DataCC[self.LabDataSave[Lab]] = DMan.Ca_E(DatesC,
                    DataC[self.LabDataSave[Lab]],dt=dt,escala=-1,
                    op=self.LabDataOper[Lab],flagNaN=False)
            # Data Eliminations
            if self.ElimOver[self.LabDataSave[Lab]] is not None:
                DataCC[self.LabDataSave[Lab]][DataCC[self.LabDataSave[Lab]] > self.ElimOver[self.LabDataSave[Lab]]] = np.nan
            if self.ElimLow[self.LabDataSave[Lab]] is not None:
                DataCC[self.LabDataSave[Lab]][DataCC[self.LabDataSave[Lab]] < self.ElimLow[self.LabDataSave[Lab]]] = np.nan

            # The data is converted
            DatesC2, DatesNC2, VC2 = EM.Ca_EC(Date=DatesCC,V1=DataCC[self.LabDataSave[Lab]],
                    op=self.LabDataOper1[Lab],
                    key=None,dtm=dt,op2=self.LabDataOper2[Lab],op3=self.LabDataOper3[Lab])

            for LabH in LabelsH:
                DataH[self.LabDataSave[Lab]+LabH] = VC2[LabH]
                LabelsHmat.append(self.LabDataSave[Lab]+LabH)
            for LabD in LabelsD:
                DataD[self.LabDataSave[Lab]+LabD] = VC2[LabD]
                LabelsDmat.append(self.LabDataSave[Lab]+LabD)
            
        self.DataH = DataH
        self.DataD = DataD
        self.DatesH = DatesC2['DateH']
        self.DatesD['DatesD'] = DatesC2['DateD']
        self.DatesD['DatesM'] = DatesC2['DateM']
        self.DataCC = DataCC
        self.DatesCC = DatesCC
        self.DatesCN = DatesCN
        self.DatesNC2 = DatesNC2['DateMN']
        self.LabelsHmat = LabelsHmat
        self.LabelsDmat = LabelsDmat
        return 
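A hedged call sketch; the SIATA constructor arguments and the station code are assumptions, since only this method is shown.

# Hypothetical: '025' stands in for a real SIATA station code and the
# constructor is assumed to take no required arguments.
S = SIATA()
S.LoadData(Station='025', flagComplete=True, dt=5)
print(S.LabelsHmat)     # hourly labels generated for the saved variables
print(sorted(S.DataH))  # hourly aggregated series keyed by label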
Code example #9
def test_datetime2str():
    Date = DUtil.Dates_datetime2str([datetime(2012, 1, 1, 0, 0)])
    assert isinstance(Date[0], str)
Code example #10
def test_str2datetime():
    Date = DUtil.Dates_str2datetime(['2012-02-01 0100'], flagQuick=True)
    assert isinstance(Date[0], datetime) or isinstance(Date[0], date)
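A round-trip sketch built only from the two helpers the tests exercise; the default Date_Format of Dates_str2datetime is assumed to parse the strings that Dates_datetime2str produces.

from datetime import datetime

Dates = DUtil.Dates_datetime2str([datetime(2012, 1, 1, 0, 0),
                                  datetime(2012, 1, 1, 0, 5)])
Back = DUtil.Dates_str2datetime(Dates)
assert Back[0].year == 2012 and Back[0].month == 1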