예제 #1
0
def datetime(obj1, start, finish, *attributes):
    """
    Filter a Varpy data object to events in the time window [start, finish).

    Args:
        obj1: a Varpy data object (volcanic or laboratory) holding catalogue data
        start: start datetime of retained data (number or datetime string)
        finish: finish datetime of retained data (number or datetime string)
        attributes: names of the attributes (ecvd, scvd, ...) to filter;
            when omitted, defaults are chosen from obj1.type

    Returns:
        obj2: a deep copy of obj1 with each attribute's dataset restricted
            to rows whose datetime t satisfies start <= t < finish
    """
    obj2 = copy.deepcopy(obj1)

    if not attributes:
        if obj2.type == 'volcanic':
            attributes = ['ecvd', 'evd', 'scvd']
        else:
            # Bug fix: this was the single string 'ecld, scld', so neither
            # attribute name ever matched and laboratory data was never
            # filtered; it must be two separate names.
            attributes = ['ecld', 'scld']

    # Interpret the limits: try datetime-string conversion first, fall back
    # to treating them as plain numbers.
    try:
        start = conversion.date2int(start)
        finish = conversion.date2int(finish)
    except Exception:
        start = float(start)
        finish = float(finish)

    for atr in attributes:
        if atr != 'scvd':
            try:
                var_column = getattr(obj2, atr).header.index('datetime')
                dataset = getattr(obj2, atr).dataset
                keep = logical_and(dataset[:, var_column] >= start,
                                   dataset[:, var_column] < finish)
                getattr(obj2, atr).dataset = dataset[keep, :]
            except (AttributeError, ValueError):
                # Attribute absent or no 'datetime' column: leave it untouched.
                pass
        else:
            # scvd is a dict of datasets; filter each one independently.
            for key in obj2.scvd.keys():
                try:
                    var_column = obj2.scvd[key].header.index('datetime')
                    dataset = obj2.scvd[key].dataset
                    keep = logical_and(dataset[:, var_column] >= start,
                                       dataset[:, var_column] < finish)
                    obj2.scvd[key].dataset = dataset[keep, :]
                except (AttributeError, ValueError):
                    pass

    return obj2
예제 #2
0
파일: core.py 프로젝트: rosafilgueira/VarPy
    def __apply_model(self, data_type, model_type, model_name, start, finish,
                      **kwargs):
        """
        Private method: fit the named model to one datatype of this object.

        Args:
            self: a Varpy Var_data object
            data_type: datatype to model ('ecvd', 'scvd', 'ecld' or 'scld')
            model_type: kind of fit (retrospective model fit, prospective
                model fit, or real time — a series of prospective fits)
            model_name: name of the model module/function to apply
            start: datetime of the start of the modelled period
            finish: datetime of the finish of the modelled period
            kwargs: forwarded to the model function (e.g. tf, the failure or
                eruption time)

        Raises:
            Nothing: errors during model application are swallowed by the
            bare except below (NOTE(review): this hides real failures).
        """

        # Convert the window limits from datetime strings to numbers; on
        # failure, assume they are already numeric and use them unchanged.
        try:
            tmin = conversion.date2int(start)
            tmax = conversion.date2int(finish)
        except:
            tmin = start
            tmax = finish
            pass

        # Timestamp of this call.  NOTE(review): current_time is never used
        # after this assignment.
        current_time = str(datetime.now())
        # Register the model under model_name in the datatype's model
        # dictionary (appends a new key-value entry if not already present).

        getattr(self, data_type).add_model_list(model_name, model_type)

        try:
            # Fresh container for the outputs of this model run.
            m1_output = Model_Output()

            # Resolve the model by name from varpy.modelling.models and call
            # it; the model writes its results into m1_output.
            #To do: Change the way to call the models

            model_name_path = 'varpy.modelling.models.' + model_name
            model = importlib.import_module(model_name_path)
            getattr(model, model_name)(self, data_type, m1_output, tmin, tmax,
                                       **kwargs)
            #iol_mle.iol_mle(self, data_type, m1_output, tmin, tmax, **kwargs)

            # Attach the outputs to the registered model entry.
            getattr(self, data_type).models[model_name].update_model(m1_output)

            #getattr(self,data_type).display_models()

        except:
            # Intended fallback: look the model up in user_models instead.
            # NOTE(review): the fallback is commented out, so any failure in
            # the import or the model call is silently ignored.
            #m1_output=getattr(user_models,model_name)(self, data_type, m1_output, tmin, tmax, tf, paras)
            #getattr(self,data_type).models[model_name].update_model(m1_output)
            pass
예제 #3
0
def single_analysis(obj1, data_type, model_name, **kwargs):
    """
    Run a single model analysis over a fixed time window of a Varpy object.

    Args:
        obj1: a Varpy data object holding catalogue data
        data_type: the datatype to analyse (ecvd, scvd, ecld, scld)
        model_name: name of the model to apply
        kwargs: must contain 't_min' and 't_max'; may contain 'spatial' (+
            spatial_x/y limits), 'single_attribute' (+ z_min/z_max),
            'mag_comp' and 'tf'

    Returns:
        obj2: the (filtered) object with the model applied
    """
    obj2 = copy.deepcopy(obj1)
    t_min = kwargs['t_min']
    t_max = kwargs['t_max']
    # Accept either datetime strings or plain numbers for the time limits.
    try:
        t_min = conversion.date2int(t_min)
        t_max = conversion.date2int(t_max)
    except Exception:
        t_min = float(t_min)
        t_max = float(t_max)

    # 1. Optional spatial filter (window function chosen by name).
    if 'spatial' in kwargs:
        obj2 = getattr(window,
                       kwargs['spatial'])(obj2, kwargs['spatial_x_min'],
                                          kwargs['spatial_x_max'],
                                          kwargs['spatial_y_min'],
                                          kwargs['spatial_y_max'])
    else:
        obj2 = obj1

    # 2. Optional single-attribute (e.g. depth) filter.
    if 'single_attribute' in kwargs:
        obj2 = window.single_attribute(obj2, kwargs['single_attribute'],
                                       kwargs['z_min'], kwargs['z_max'],
                                       data_type)

    # 3. Time-window filter.
    obj2 = window.datetime(obj2, t_min, t_max, data_type)

    #4. Determine completeness magnitude, apply magnitude filter, based on "mag_comp" option
    if 'mag_comp' in kwargs:
        #May need extra options here, e.g. a default value for when catalogue size is small
        # Bug fix: the original compared strings with 'is' (identity), which
        # is not guaranteed for string literals; use '==' (equality).
        if kwargs['mag_comp'] == 'maxc':
            mc = magnitudes.mag_completeness(
                obj2).ecvd.outputs['completeness_mag'].mc_maxc
        elif kwargs['mag_comp'] == 'GFT':
            mc = magnitudes.mag_completeness(
                obj2).ecvd.outputs['completeness_mag'].Mc_GFT
        elif kwargs['mag_comp'] == 'mbs':
            mc = magnitudes.mag_completeness(
                obj2).ecvd.outputs['completeness_mag'].mc_mbs
        else:
            # Bug fix: 'mag_comp' was an undefined global here; the intended
            # fallback is the user-supplied value itself as the cut-off.
            mc = kwargs['mag_comp']

        obj2 = window.single_attribute(obj2, 'magnitude', mc, 10.0, data_type)

    #5. Apply model to object
    if 'tf' in kwargs:
        obj2.apply_model(data_type, 'retrospective_analysis', model_name,
                         t_min, t_max, **kwargs)
    else:
        obj2.apply_model(data_type, 'single_forecast', model_name, t_min,
                         t_max, **kwargs)

    return obj2
예제 #4
0
def single_analysis(obj1, data_type, model_name, **kwargs):
    """
    Run a single model analysis over a fixed time window of a Varpy object.

    Args:
        obj1: a Varpy data object holding catalogue data
        data_type: the datatype to analyse (ecvd, scvd, ecld, scld)
        model_name: name of the model to apply
        kwargs: must contain 't_min' and 't_max'; may contain 'spatial' (+
            spatial_x/y limits), 'single_attribute' (+ z_min/z_max),
            'mag_comp' and 'tf'

    Returns:
        obj2: the (filtered) object with the model applied
    """
    obj2 = copy.deepcopy(obj1)
    t_min = kwargs['t_min']
    t_max = kwargs['t_max']
    # Accept either datetime strings or plain numbers for the time limits.
    try:
        t_min = conversion.date2int(t_min)
        t_max = conversion.date2int(t_max)
    except Exception:
        t_min = float(t_min)
        t_max = float(t_max)

    # 1. Optional spatial filter (window function chosen by name).
    if 'spatial' in kwargs:
        obj2 = getattr(window, kwargs['spatial'])(obj2,
                                                  kwargs['spatial_x_min'],
                                                  kwargs['spatial_x_max'],
                                                  kwargs['spatial_y_min'],
                                                  kwargs['spatial_y_max'])
    else:
        obj2 = obj1

    # 2. Optional single-attribute (e.g. depth) filter.
    if 'single_attribute' in kwargs:
        obj2 = window.single_attribute(obj2, kwargs['single_attribute'],
                                       kwargs['z_min'], kwargs['z_max'],
                                       data_type)

    # 3. Time-window filter.
    obj2 = window.datetime(obj2, t_min, t_max, data_type)

    #4. Determine completeness magnitude, apply magnitude filter, based on "mag_comp" option
    if 'mag_comp' in kwargs:
        #May need extra options here, e.g. a default value for when catalogue size is small
        # Bug fix: the original compared strings with 'is' (identity), which
        # is not guaranteed for string literals; use '==' (equality).
        if kwargs['mag_comp'] == 'maxc':
            mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].mc_maxc
        elif kwargs['mag_comp'] == 'GFT':
            mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].Mc_GFT
        elif kwargs['mag_comp'] == 'mbs':
            mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].mc_mbs
        else:
            # Bug fix: 'mag_comp' was an undefined global here; the intended
            # fallback is the user-supplied value itself as the cut-off.
            mc = kwargs['mag_comp']

        obj2 = window.single_attribute(obj2, 'magnitude', mc, 10.0, data_type)

    #5. Apply model to object
    if 'tf' in kwargs:
        obj2.apply_model(data_type, 'retrospective_analysis', model_name, t_min, t_max, **kwargs)
    else:
        obj2.apply_model(data_type, 'single_forecast', model_name, t_min, t_max, **kwargs)

    return obj2
예제 #5
0
파일: core.py 프로젝트: rosafilgueira/VarPy
    def __apply_model(self, data_type, model_type,  model_name, start, finish, **kwargs): 
        """
        Private method: fit the named model to one datatype of this object.

        Args:
            self: a Varpy Var_data object
            data_type: datatype to model ('ecvd', 'scvd', 'ecld' or 'scld')
            model_type: kind of fit (retrospective model fit, prospective
                model fit, or real time — a series of prospective fits)
            model_name: name of the model module/function to apply
            start: datetime of the start of the modelled period
            finish: datetime of the finish of the modelled period
            kwargs: forwarded to the model function (e.g. tf, the failure or
                eruption time)

        Raises:
            Nothing: errors during model application are swallowed by the
            bare except below (NOTE(review): this hides real failures).
        """
        # Convert the window limits from datetime strings to numbers; on
        # failure, assume they are already numeric and use them unchanged.
        try:
            tmin=conversion.date2int(start)
            tmax=conversion.date2int(finish)
        except:
            tmin=start
            tmax=finish
            pass

        # Timestamp of this call.  NOTE(review): current_time is never used
        # after this assignment.
        current_time=str(datetime.now())
        # Register the model under model_name in the datatype's model
        # dictionary (appends a new key-value entry if not already present).

        getattr(self,data_type).add_model_list(model_name,model_type)

        try:
            # Fresh container for the outputs of this model run.
            m1_output=Model_Output()

            # Resolve the model by name from varpy.modelling.models and call
            # it; the model writes its results into m1_output.
            #To do: Change the way to call the models

            model_name_path = 'varpy.modelling.models.'+ model_name 
            model = importlib.import_module(model_name_path)
            getattr(model, model_name)(self, data_type, m1_output, tmin, tmax, **kwargs) 
            #iol_mle.iol_mle(self, data_type, m1_output, tmin, tmax, **kwargs) 

            # Attach the outputs to the registered model entry.
            getattr(self,data_type).models[model_name].update_model(m1_output)

            #getattr(self,data_type).display_models()

        except:  
            # Intended fallback: look the model up in user_models instead.
            # NOTE(review): the fallback is commented out, so any failure in
            # the import or the model call is silently ignored.
            #m1_output=getattr(user_models,model_name)(self, data_type, m1_output, tmin, tmax, tf, paras)
            #getattr(self,data_type).models[model_name].update_model(m1_output)
            pass
예제 #6
0
#4
mag_comp = 'GFT'  #method for magnitude filtering. 'None' is no filtering

#5
model = 'iol_mle'  #name of model to apply. Could do many???

#Setup Varpy object
d1 = core.Volcanic(ID)

#Add data to Varpy object
d1.add_datatype('ecvd', ecvd_data_file, ecvd_metadata_file)

#Set-up times to run model
# Accept datetime strings or plain numbers: date2int raises on non-date
# input, in which case fall back to a plain float conversion.
try:
    t_min = conversion.date2int(t_min)
    t_max = conversion.date2int(t_max)
except:
    t_min = float(t_min)
    t_max = float(t_max)
    pass

# NOTE(review): 'times' is a single number, and iterating str(times) below
# walks the *characters* of its string form — almost certainly not intended.
# Compare multiple_analysis, which iterates arange(t_min, t_max, t_step);
# confirm the intended loop values before relying on this script.
times = t_max

#Run forecasts in a loop...
for t_forc in str(times):
    #Maybe update object d1 here????
    #1. Apply spatial filter
    #May need a "None" option here
    d2 = window.latlon(d1, lat_min, lat_max, lon_min, lon_max)
    #2. Apply depth filter
예제 #7
0
def ecd_rate_plot(obj1, plot_type=None, t_inc=None, t_lims=None, lon_lims=None, lat_lims=None, z_lims=None, Mc=None, Name=None, Save=None):
    """
    Plot the rate and cumulative number of ecd events with time.

    Args:
        obj1: a varpy object containing ecd data
        plot_type: 'rate', 'cumulative' or both (None)
        t_inc: the time increment over which to count event rates, default = 1
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off
        Name: optional tag used in saved file names (defaults to a timestamp)
        Save: if not None, save the figure as png and eps and record the paths

    Returns:
        fig1: the resulting matplotlib figure
    """
    # Select the earthquake catalogue for the object type.
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    # Optional filters: time window, lon/lat windows, depth range, magnitude.
    if t_lims is not None:
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                                data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                                data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    dt_data = data[:, header.index('datetime')]

    # When no explicit limits were given, span the filtered data.
    if t_lims is None:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    if t_inc is None:
        t_inc = 1.0

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    # Idiom fix throughout: 'is None' instead of '== None'.
    if plot_type == 'rate' or plot_type is None:

        day_bins = arange(t_min, t_max, t_inc)
        DER, DER_bes = histogram(dt_data, day_bins)

        # Values above ~693500 are treated as matplotlib date numbers;
        # smaller values as plain day numbers (same threshold as below).
        if dt_data[0] > 693500:
            ax1.bar(mdates.num2date(day_bins[:-1]), DER, width=t_inc, color='darkslategrey', edgecolor='darkslategrey')
            ax1.set_xlabel('Date', fontsize=8)
        else:
            ax1.bar(day_bins[:-1], DER, width=t_inc, color='darkslategrey', edgecolor='darkslategrey')
            ax1.set_xlabel('Day', fontsize=8)

        ax1.set_ylabel('Daily number of earthquakes', fontsize=8)

    if plot_type == 'cumulative' or plot_type is None:
        # When both panels are drawn, the cumulative curve gets its own y-axis.
        if plot_type is None:
            ax2 = ax1.twinx()
        else:
            ax2 = ax1

        if dt_data[0] > 693500:
            ax2.plot(mdates.num2date(dt_data), arange(len(dt_data)) + 1, 'k')
        else:
            ax2.plot(dt_data, arange(len(dt_data)) + 1, 'k')

        ax2.set_ylabel('Total earthquakes', fontsize=8)
        ax1.xaxis.set_ticks_position('bottom')

    # Mark eruption start times when the object carries eruption data.
    # Bug fix: getattr with a default, so objects without an 'evd' attribute
    # (e.g. laboratory data) no longer raise AttributeError.
    if getattr(obj1, 'evd', None) is not None:
        eruption_starts = atleast_2d(obj1.evd.dataset)[:, obj1.evd.header.index('start_datetime')]
        for i in range(len(eruption_starts)):
            if dt_data[0] > 693500:
                ax1.axvline(mdates.num2date(eruption_starts[i]), color='red', linestyle='--')
            else:
                ax1.axvline(eruption_starts[i], color='red', linestyle='--')

    ax1.set_xlim(t_min, t_max)

    if Save is not None:
        if Name is None:
            timestamp = str(time.time())
        else:
            timestamp = Name
        png_name = obj1.figure_path + '/ecd_rate_plot-' + timestamp + '.png'
        eps_name = obj1.figure_path + '/ecd_rate_plot-' + timestamp + '.eps'

        # Record the figure paths on the object before saving.
        if 'ecd_rate_plot' not in obj1.figures.keys():
            obj1.figures['ecd_rate_plot'] = []
        obj1.figures['ecd_rate_plot'].append(png_name)
        obj1.figures['ecd_rate_plot'].append(eps_name)
        plt.savefig(png_name)
        plt.savefig(eps_name)

    return fig1
예제 #8
0
def multiple_analysis(obj1, data_type, model_name, **kwargs):
    """
    Run a series of model forecasts at increasing forecast times.

    Args:
        obj1: a Varpy data object holding catalogue data
        data_type: the datatype to analyse (ecvd, scvd, ecld, scld)
        model_name: name of the model to apply at each forecast time
        kwargs: must contain 't_step'; when 't_min'/'t_max' are absent the
            run is prospective (today + 100 days); may contain 'data_file',
            'spatial' (+ limits), 'single_attribute' (+ z limits),
            'mag_comp' and 'tf'

    Returns:
        obj2: the object with one model output per forecast time
    """
    obj2 = copy.deepcopy(obj1)

    t_step = kwargs['t_step']

    if 't_min' not in kwargs:
        # Time limits not specified: prospective mode, forecasting from
        # today over the next 100 days.
        t_min = conversion.date2int(date.today().strftime("%d-%m-%Y"))
        t_max_a = date.today() + timedelta(days=100)
        t_max = conversion.date2int(t_max_a.strftime("%d-%m-%Y"))
        model_type = 'prospective_forecast'
    else:
        t_min = kwargs['t_min']
        t_max = kwargs['t_max']
        model_type = 'retrosepective_forecast'
        # Accept datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_min)
            t_max = conversion.date2int(t_max)
        except Exception:
            t_min = float(t_min)
            t_max = float(t_max)

    times = arange(t_min, t_max, t_step)

    if 'data_file' in kwargs:
        data_file = kwargs['data_file']

    for t_forc in times:

        if model_type == 'prospective_forecast':
            # Refresh the catalogue so each forecast sees the latest data.
            obj2.update_datatype(data_type, data_file)

        if 'spatial' in kwargs:
            obj2 = getattr(window, kwargs['spatial'])(obj2,
                                                      kwargs['spatial_x_min'],
                                                      kwargs['spatial_x_max'],
                                                      kwargs['spatial_y_min'],
                                                      kwargs['spatial_y_max'])
        else:
            obj2 = obj1

        if 'single_attribute' in kwargs:
            obj2 = window.single_attribute(obj2, kwargs['single_attribute'],
                                           kwargs['z_min'], kwargs['z_max'],
                                           data_type)
        else:
            obj2 = obj1

        obj2 = window.datetime(obj2, t_min, t_forc, data_type)
        n_events = len(getattr(obj2, data_type).dataset)

        # Skip forecast times with too few events to fit a model.
        if n_events >= 10:
            # Bug fix: mc was referenced below even when 'mag_comp' was not
            # supplied (NameError); default it so the output is consistent.
            mc = None

            #4. Determine completeness magnitude, apply magnitude filter, based on "mag_comp" option
            if 'mag_comp' in kwargs:
                if n_events <= 50:
                    # Small catalogue: use a fixed default cut-off.
                    mc = 0.
                else:
                    # Bug fix: '==' instead of 'is' for string comparison.
                    if kwargs['mag_comp'] == 'maxc':
                        mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].mc_maxc
                    elif kwargs['mag_comp'] == 'GFT':
                        mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].Mc_GFT
                    elif kwargs['mag_comp'] == 'mbs':
                        mc = magnitudes.mag_completeness(obj2).ecvd.outputs['completeness_mag'].mc_mbs
                    else:
                        # Bug fix: 'mag_comp' was an undefined global; use
                        # the user-supplied value itself as the cut-off.
                        mc = kwargs['mag_comp']

                obj2 = window.single_attribute(obj2, 'magnitude', mc, 10.0, data_type)

            #5. Apply model to object.  (The original 'tf' if/else branches
            # were byte-identical, so a single call suffices.)
            obj2.apply_model(data_type, model_type, model_name, t_min, t_forc, **kwargs)

            #6. Modify the model output to record t_forc and mc.
            model_output = getattr(obj2, data_type).last_model_output(model_name)
            model_output.t_forc = t_forc
            model_output.mc = mc

            getattr(obj2, data_type).update_last_model_output(model_name, model_output)

    return obj2
예제 #9
0
def model_plot(obj1, data_type, model_name, plot_type=None, t_inc=None, t_lims=None):
    """
    Overlay fitted model rates/totals on the event-rate plot and save it.

    Args:
        obj1: a varpy object with fitted models on the given datatype
        data_type: 'ecvd' or 'ecld' (scvd/scld are not supported)
        model_name: name of the model (currently unused; all models on the
            datatype are plotted)
        plot_type: 'rate', 'cumulative' or both (None)
        t_inc: time increment forwarded conceptually to the rate plot
        t_lims: [t_min, t_max] time limits (numbers or datetime strings)
    """
    # Bug fix: the original test "data_type is 'ecvd' or 'ecld'" was always
    # true ('ecld' is a truthy literal) and misused identity comparison.
    # Unsupported datatypes now take the early exit.
    if data_type not in ('ecvd', 'ecld'):
        #Alternative for SCVD, SCLD data?
        print('SCLD and SCVD data not currently supported')
        return

    #1. Determine t_min and t_max - check these aren't stored in model information...
    if t_lims is not None:
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
    else:
        if obj1.type == 'volcanic':
            data = obj1.ecvd.dataset
            header = obj1.ecvd.header
        else:
            data = obj1.ecld.dataset
            header = obj1.ecld.header

        dt_data = data[:, header.index('datetime')]

        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    #2. Plot the underlying rate/total graph using "rate_plots"
    #Seems unnecessary to repeat same plotting code here.
    fig = rate_plots.ecd_rate_plot(obj1, plot_type, t_inc=None, t_lims=None, Save=None)

    #3. Get the axes from fig to modify with new series
    axes = fig.get_axes()
    ax1 = axes[0]
    if plot_type is None:
        ax2 = axes[1]
    elif plot_type == 'cumulative':
        ax2 = axes[0]

    #4. Create series of times between t_min, t_max to evaluate model rate, totals
    times = linspace(t_min, t_max, 500)

    for model in getattr(obj1, data_type).models.keys():

        m0 = getattr(obj1, data_type).models[model].outputs

        for m1 in m0:
            #6. For each model output, get parameters
            params = m1.dataset

            if plot_type == 'rate' or plot_type is None:
                #7. Determine, plot rates
                #Check that the correct "daily rate" are reported...
                rate_func = m1.metadata['rate_func']
                rates = getattr(rate_funcs, rate_func[0])(times - t_min, params)
                ax1.plot(times, rates, '-r')

            if plot_type == 'cumulative' or plot_type is None:
                #8. Determine, plot totals
                total_func = m1.metadata['total_func']
                totals = getattr(rate_funcs, total_func[0])(times - t_min, 0., params)
                ax2.plot(times, totals, '-r')

        #9. Legend

    ax1.set_xlim(t_min, t_max)

    png_name = obj1.figure_path + '/model_plot.png'
    eps_name = obj1.figure_path + '/model_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #10
0
def iet_plot(obj1,
             Norm=None,
             model=None,
             t_lims=None,
             lon_lims=None,
             lat_lims=None,
             z_lims=None,
             Mc=None):
    """
    Plot a histogram of interevent times and a pdf of normalized IETs.

    Args:
        obj1: a varpy object containing event catalogue data
        Norm: if True, normalize the IET histogram x-axis by the mean IET
        model: option to fit and bootstrap CoIs for a model. Existing
            options: 'Poisson', 'Gamma'
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off

    Returns:
        None (the figure is saved as png and eps under obj1.figure_path)
    """
    # Select the earthquake catalogue for the object type.
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    # Optional filters: time window, lon/lat windows, depth range, magnitude.
    if t_lims is not None:
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    dt_data = data[:, header.index('datetime')]

    # Interevent times and their mean.
    iets = diff(dt_data, n=1)
    iet_mean = mean(iets)

    # Logarithmic IET bins and their midpoints.
    iet_bins = logspace(-5.0, 2.0, num=50)
    mid_iet_bins = iet_bins[:-1] + diff(iet_bins) / 2

    iet_counts, iet_bes = histogram(iets, iet_bins)

    ##########
    fig1 = plt.figure(1, figsize=(12, 6))
    ax1 = fig1.add_subplot(121, axisbg='lightgrey')

    # Left panel: raw IET histogram; right panel: normalized IET pdf.
    ax1.semilogx(mid_iet_bins, iet_counts, '-s', color='blue')

    ax2 = fig1.add_subplot(122, axisbg='lightgrey')
    ax2.loglog(mid_iet_bins / iet_mean,
               (iet_mean * iet_counts) / (diff(iet_bins) * len(iets)),
               '-s',
               color='blue')

    if Norm is True:
        norm = iet_mean
    else:
        norm = 1.

    if model is not None:
        # Bootstrap 95% COIs from simulated catalogues of the fitted model.
        iet_bstps = 1000
        rates_bstps = zeros((len(iet_bins) - 1, iet_bstps))

        # Bug fix: the original compared strings with 'is' (identity),
        # which is not guaranteed for string literals; use '==' (equality).
        if model == 'Gamma':
            # Fit a gamma model and bootstrap its histogram.
            fit_alpha, fit_loc, fit_beta = gamma.fit(iets, loc=0.0)
            for j in range(iet_bstps):
                model_sim = gamma.rvs(fit_alpha,
                                      loc=fit_loc,
                                      scale=fit_beta,
                                      size=len(iets))
                rates_bstps[:, j], model_bes = histogram(model_sim, iet_bins)

            coi_95 = scoreatpercentile(rates_bstps.transpose(), 95, axis=0)
            coi_5 = scoreatpercentile(rates_bstps.transpose(), 5, axis=0)

            ax1.semilogx(
                mid_iet_bins / norm,
                gamma.pdf(mid_iet_bins, fit_alpha, fit_loc, fit_beta) *
                diff(iet_bins) * len(iets), 'r')
            ax1.semilogx(mid_iet_bins / norm, coi_95, 'r:')
            ax1.semilogx(mid_iet_bins / norm, coi_5, 'r:')

            ax2.loglog(mid_iet_bins / iet_mean,
                       (iet_mean *
                        gamma.pdf(mid_iet_bins, fit_alpha, fit_loc, fit_beta)),
                       'r')
            ax2.loglog(mid_iet_bins / iet_mean,
                       (iet_mean * coi_95) / (diff(iet_bins) * len(iets)),
                       'r:')
            ax2.loglog(mid_iet_bins / iet_mean,
                       (iet_mean * coi_5) / (diff(iet_bins) * len(iets)), 'r:')

        elif model == 'Poisson':
            # Fit an exponential (Poisson-process) model and bootstrap it.
            for j in range(iet_bstps):
                model_sim = expon.rvs(scale=iet_mean, size=len(iets))
                rates_bstps[:, j], model_bes = histogram(model_sim, iet_bins)

            coi_95 = scoreatpercentile(rates_bstps.transpose(), 95, axis=0)
            coi_5 = scoreatpercentile(rates_bstps.transpose(), 5, axis=0)

            ax1.semilogx(
                mid_iet_bins / norm,
                expon.pdf(mid_iet_bins, loc=0, scale=iet_mean) *
                diff(iet_bins) * len(iets), 'r')
            ax1.semilogx(mid_iet_bins / norm, coi_95, 'r:')
            ax1.semilogx(mid_iet_bins / norm, coi_5, 'r:')

            ax2.loglog(
                mid_iet_bins / iet_mean,
                (iet_mean * expon.pdf(mid_iet_bins, loc=0, scale=iet_mean)),
                'r')
            ax2.loglog(mid_iet_bins / iet_mean,
                       (iet_mean * coi_95) / (diff(iet_bins) * len(iets)),
                       'r:')
            ax2.loglog(mid_iet_bins / iet_mean,
                       (iet_mean * coi_5) / (diff(iet_bins) * len(iets)), 'r:')

    if Norm is True:
        ax1.set_xlabel(r'$\tau \backslash \bar\tau$ (days)')
    else:
        ax1.set_xlabel(r'$\tau$ (days)')

    ax1.set_ylabel('Frequency')
    ax1.xaxis.set_ticks_position('bottom')

    ax2.set_xlim(0.00008, 200)
    ax2.set_ylim(0.00001, 1000)
    ax2.set_xlabel(r'$\tau \backslash \bar\tau$ (days)')
    ax2.set_ylabel('pdf')

    ax2.xaxis.set_ticks_position('bottom')

    png_name = obj1.figure_path + '/iet_plots.png'
    eps_name = obj1.figure_path + '/iet_plots.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #11
0
def model_error_plot(obj1,
                     data_type,
                     model_name,
                     t_lims=None,
                     percentiles=None,
                     n_bs=None):
    """
    Plot observed event rates against fitted model rates with bootstrapped
    confidence limits, and save the figure.

    Args:
        obj1: a varpy object with fitted models on the given datatype
        data_type: 'ecvd' or 'ecld' (scvd/scld are not supported)
        model_name: name of the model (currently unused; all models on the
            datatype are plotted)
        t_lims: [t_min, t_max] time limits (numbers or datetime strings)
        percentiles: confidence percentiles forwarded to model_stats
        n_bs: number of bootstrap samples forwarded to model_stats
    """
    # Bug fix: the original test "data_type is 'ecvd' or 'ecld'" was always
    # true ('ecld' is a truthy literal) and misused identity comparison.
    if data_type not in ('ecvd', 'ecld'):
        #Alternative for SCVD, SCLD data?
        print('SCLD and SCVD data not currently supported')
        return

    # Bug fix: the catalogue is needed for the rate histogram in all cases,
    # but the original only loaded it (and dt_data) when t_lims was None,
    # so supplying t_lims raised NameError at the histogram call below.
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    dt_data = data[:, header.index('datetime')]

    #1. Determine t_min and t_max - check these aren't stored in model information...
    if t_lims is not None:
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
    else:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    t_inc = (t_max - t_min) / 11.

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    # Observed rates in 10 equal bins across the window.
    rate_bins = linspace(t_min, t_max, 11)
    mid_bins = rate_bins[:-1] + diff(rate_bins) / 2.
    DER, DER_bes = histogram(dt_data, rate_bins)

    # Values above ~693500 are treated as matplotlib date numbers.
    if dt_data[0] > 693500:
        ax1.plot(mdates.num2date(mid_bins), DER, 'o')
        ax1.set_xlabel('Date', fontsize=8)
    else:
        ax1.plot(mid_bins, DER, 'o')
        ax1.set_xlabel('Day', fontsize=8)

    ax1.set_ylabel('rate of earthquakes', fontsize=8)

    #4. Create series of times between t_min, t_max to evaluate model rates
    times = linspace(t_min, t_max, 500)

    for model in getattr(obj1, data_type).models.keys():

        m0 = getattr(obj1, data_type).models[model].outputs

        for m1 in m0:
            #6. For each model output, get parameters
            params = m1.dataset

            rate_func = m1.metadata['rate_func']
            rates = getattr(rate_funcs, rate_func[0])(times - t_min,
                                                      params)
            ax1.plot(times, rates * t_inc, '-r')

            # Bootstrap confidence limits around the fitted rate curve.
            coi_l, coi_u = model_stats.model_CoIs(rate_func, params,
                                                  rate_bins, percentiles,
                                                  n_bs)

            ax1.plot(mid_bins, coi_l, 'r:')
            ax1.plot(mid_bins, coi_u, 'r:')

    ax1.set_xlim(t_min, t_max)

    png_name = obj1.figure_path + '/model_plot.png'
    eps_name = obj1.figure_path + '/model_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #12
0
def model_plot(obj1,
               data_type,
               model_name,
               plot_type=None,
               t_inc=None,
               t_lims=None):
    """
    Overlay fitted model rate/total curves on a rate plot of the data.

    Args:
        obj1: a varpy object containing event catalogue data and fitted models
        data_type: 'ecvd' or 'ecld' attribute holding the catalogue
        model_name: name of the model (currently unused; all models are plotted)
        plot_type: 'rate', 'cumulative' or None (both)
        t_inc: time increment forwarded to the underlying rate plot
        t_lims: [t_min, t_max] time axis limits (numbers or datetime strings)

    Returns:
        None. Saves model_plot.png and model_plot.eps to obj1.figure_path.
    """
    # Bug fix: the original condition `data_type is 'ecvd' or 'ecld'` was
    # always truthy ('ecld' is a non-empty string); use a membership test and
    # exit early for unsupported types (also avoids NameErrors on ax1/t_min).
    if data_type not in ('ecvd', 'ecld'):
        #Alternative for SCVD, SCLD data?
        print('SCLD and SCVD data not currently supported')
        return

    #1. Determine t_min and t_max - check these aren't stored in model information...
    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
    else:
        if obj1.type == 'volcanic':
            data = obj1.ecvd.dataset
            header = obj1.ecvd.header
        else:
            data = obj1.ecld.dataset
            header = obj1.ecld.header

        dt_data = data[:, header.index('datetime')]
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    #2. Plot the underlying rate/total graph using "rate_plots".
    # Bug fix: forward the caller's t_inc/t_lims instead of hard-coded None.
    fig = rate_plots.ecd_rate_plot(obj1,
                                   plot_type,
                                   t_inc=t_inc,
                                   t_lims=t_lims,
                                   Save=None)

    #3. Get the axes from fig to modify with new series.
    axes = fig.get_axes()
    ax1 = axes[0]
    if plot_type is None:
        ax2 = axes[1]
    elif plot_type == 'cumulative':
        ax2 = axes[0]

    #4. Create series of times between t_min, t_max to evaluate model rate, totals
    times = linspace(t_min, t_max, 500)

    for model in getattr(obj1, data_type).models.keys():

        m0 = getattr(obj1, data_type).models[model].outputs

        for m1 in m0:
            #For each model output, get its fitted parameters
            params = m1.dataset

            if plot_type == 'rate' or plot_type is None:
                #Determine, plot rates
                #NOTE(review): check that the correct "daily rate" is reported.
                rate_func = m1.metadata['rate_func']
                rates = getattr(rate_funcs, rate_func[0])(times - t_min,
                                                          params)
                ax1.plot(times, rates, '-r')

            if plot_type == 'cumulative' or plot_type is None:
                #Determine, plot cumulative totals
                total_func = m1.metadata['total_func']
                totals = getattr(rate_funcs, total_func[0])(times - t_min,
                                                            0., params)
                ax2.plot(times, totals, '-r')

    ax1.set_xlim(t_min, t_max)

    png_name = obj1.figure_path + '/model_plot.png'
    eps_name = obj1.figure_path + '/model_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #13
0
def datetime(obj1, start, finish, *attributes):
    """
    Performs a datetime filter on a Varpy var_data class.

    Args:
        obj1: A Varpy data class containing earthquake catalogue data
        start: start datetime of retained data (number or datetime string)
        finish: finish datetime of retained data (number or datetime string)
        attributes: optional attribute names ('ecvd', 'scvd', ...) to filter;
            when omitted, defaults are chosen from obj1.type.

    Returns:
        obj2: A filtered deep copy of obj1 (obj1 itself is left unmodified).
    """
    obj2 = copy.deepcopy(obj1)

    if not attributes:
        if obj2.type == 'volcanic':
            attributes = ['ecvd', 'evd', 'scvd']
        else:
            # Bug fix: this was the single string 'ecld, scld', so neither
            # attribute was ever matched for laboratory data.
            attributes = ['ecld', 'scld']

    # Accept either datetime strings or plain numbers for the window limits.
    try:
        start = conversion.date2int(start)
        finish = conversion.date2int(finish)
    except Exception:
        start = float(start)
        finish = float(finish)

    for atr in attributes:

        if atr != 'scvd':
            try:
                obj_attr = getattr(obj2, atr)
                col = obj_attr.header.index('datetime')
                keep = logical_and(obj_attr.dataset[:, col] >= start,
                                   obj_attr.dataset[:, col] < finish)
                obj_attr.dataset = obj_attr.dataset[keep, :]
            except Exception:
                # Attribute absent or lacks a datetime column: skip silently.
                pass

        else:
            # scvd is a dict of datasets; filter each one independently.
            for key in obj2.scvd.keys():
                try:
                    col = obj2.scvd[key].header.index('datetime')
                    keep = logical_and(obj2.scvd[key].dataset[:, col] >= start,
                                       obj2.scvd[key].dataset[:, col] < finish)
                    obj2.scvd[key].dataset = obj2.scvd[key].dataset[keep, :]
                except Exception:
                    pass

    return obj2
예제 #14
0
def multiple_analysis(obj1, data_type, model_name, **kwargs):
    """
    Repeatedly apply a model to successively longer time windows of data.

    Args:
        obj1: a varpy object containing event catalogue data
        data_type: attribute holding the catalogue (e.g. 'ecvd')
        model_name: name of the model to apply at each forecast time
        **kwargs: must contain 't_step'; optional 't_min'/'t_max' (otherwise
            prospective mode forecasts from today over 100 days), 'data_file',
            'spatial' (+ spatial_x/y limits), 'single_attribute' (+ z limits),
            'mag_comp' ('maxc', 'GFT', 'mbs' or a numeric cut-off).

    Returns:
        obj2: the (copied/filtered) object carrying the model outputs.
    """
    obj2 = copy.deepcopy(obj1)

    t_step = kwargs['t_step']

    if 't_min' not in kwargs:
        # Prospective mode: forecast from today over the next 100 days.
        t_min = conversion.date2int(date.today().strftime("%d-%m-%Y"))
        t_max_a = date.today() + timedelta(days=100)
        t_max = conversion.date2int(t_max_a.strftime("%d-%m-%Y"))
        model_type = 'prospective_forecast'
    else:
        t_min = kwargs['t_min']
        t_max = kwargs['t_max']
        # NOTE(review): 'retrosepective' is misspelled, but the label is
        # preserved in case downstream code matches this exact string.
        model_type = 'retrosepective_forecast'
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_min)
            t_max = conversion.date2int(t_max)
        except Exception:
            t_min = float(t_min)
            t_max = float(t_max)

    times = arange(t_min, t_max, t_step)

    if 'data_file' in kwargs:
        data_file = kwargs['data_file']

    for t_forc in times:

        if model_type == 'prospective_forecast':
            # assumes 'data_file' was supplied in prospective mode -- TODO confirm
            obj2.update_datatype(data_type, data_file)

        #1. Apply spatial filter, if requested.
        if 'spatial' in kwargs:
            obj2 = getattr(window,
                           kwargs['spatial'])(obj2, kwargs['spatial_x_min'],
                                              kwargs['spatial_x_max'],
                                              kwargs['spatial_y_min'],
                                              kwargs['spatial_y_max'])
        else:
            # NOTE(review): falls back to the original (uncopied) object,
            # discarding the deep copy -- confirm this is intended.
            obj2 = obj1

        #2. Apply single-attribute (depth) filter, if requested.
        if 'single_attribute' in kwargs:
            obj2 = window.single_attribute(obj2, kwargs['single_attribute'],
                                           kwargs['z_min'], kwargs['z_max'],
                                           data_type)
        else:
            obj2 = obj1

        #3. Select the time window up to the current forecast time.
        obj2 = window.datetime(obj2, t_min, t_forc, data_type)
        n_events = len(getattr(obj2, data_type).dataset)

        if n_events >= 10:
            # Bug fix: mc must exist even when no 'mag_comp' option is given,
            # because it is stored on the model output below.
            mc = None

            #4. Determine completeness magnitude, apply magnitude filter, based on "mag_comp" option
            if 'mag_comp' in kwargs:
                if n_events <= 50:
                    mc = 0.
                else:
                    # Bug fix: string options were compared with 'is'
                    # (identity); use equality instead.
                    if kwargs['mag_comp'] == 'maxc':
                        mc = magnitudes.mag_completeness(
                            obj2).ecvd.outputs['completeness_mag'].mc_maxc
                    elif kwargs['mag_comp'] == 'GFT':
                        mc = magnitudes.mag_completeness(
                            obj2).ecvd.outputs['completeness_mag'].Mc_GFT
                    elif kwargs['mag_comp'] == 'mbs':
                        mc = magnitudes.mag_completeness(
                            obj2).ecvd.outputs['completeness_mag'].mc_mbs
                    else:
                        # Bug fix: referenced an undefined global 'mag_comp';
                        # treat the option itself as a numeric cut-off.
                        mc = kwargs['mag_comp']

                obj2 = window.single_attribute(obj2, 'magnitude', mc, 10.0,
                                               data_type)

            #5. Apply model to object (both branches of the original 'tf'
            #   test were identical, so the test is dropped).
            obj2.apply_model(data_type, model_type, model_name, t_min,
                             t_forc, **kwargs)

            #6. Record the forecast time and completeness magnitude on the
            #   latest model output.
            model_output = getattr(obj2,
                                   data_type).last_model_output(model_name)
            model_output.t_forc = t_forc
            model_output.mc = mc

            getattr(obj2, data_type).update_last_model_output(
                model_name, model_output)

    return obj2
예제 #15
0
def scatter_plot(obj1,
                 variable1,
                 variable2=None,
                 colour=None,
                 x_lims=None,
                 y_lims=None,
                 t_lims=None,
                 lon_lims=None,
                 lat_lims=None,
                 z_lims=None,
                 Mc=None):
    """
    Plot the evolution of ecvd variables. If a single variable is specified, this is plotted as a function of datetime. 
    If a second variable is specified, this is plotted on the y-axis, with variable1 on the x-axis
    
    Args:
        obj1: a varpy object containing event catalogue data
        variable1: first ecvd variable
        variable2: second ecvd variable (optional)
        colour: variable to define colour scale
        x_lims: [x_min, x_max] defining x-axis limits
        y_lims: [y_min, y_max] defining y-axis limits
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits        
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off
    
    Returns:
        fig1: a png image of the resulting plot
    """
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    if variable2 is None:
        v1_data = data[:, header.index('datetime')]
        v2_data = data[:, header.index(variable1)]

        if t_lims is None:
            t_min = floor(v1_data.min())
            t_max = ceil(v1_data.max())
        # Bug fix: when t_lims was supplied, the original re-assigned the raw
        # (possibly string) limits here, discarding the converted numeric
        # t_min/t_max computed above; keep the converted values instead.

        if y_lims is None:
            y_min = v2_data.min()
            y_max = v2_data.max()
        else:
            y_min = y_lims[0]
            y_max = y_lims[1]
    else:
        v1_data = data[:, header.index(variable1)]
        v2_data = data[:, header.index(variable2)]

        if x_lims is None:
            x_min = v1_data.min()
            x_max = v1_data.max()
        else:
            x_min = x_lims[0]
            x_max = x_lims[1]

        if y_lims is None:
            y_min = v2_data.min()
            y_max = v2_data.max()
        else:
            y_min = y_lims[0]
            y_max = y_lims[1]

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    c_val = 'blue'  #default
    if colour is not None:
        c_val = data[:, header.index(colour)]

    if variable2 is None:
        # Values above ~693500 are presumably matplotlib date numbers -- TODO confirm
        if v1_data[0] > 693500:
            ax1.scatter(mdates.num2date(v1_data),
                        v2_data,
                        marker='o',
                        s=9,
                        c=c_val,
                        edgecolor='none')
            ax1.set_xlabel('Date', fontsize=10)
        else:
            ax1.scatter(v1_data,
                        v2_data,
                        marker='o',
                        s=9,
                        c=c_val,
                        edgecolor='none')
            ax1.set_xlabel('Day', fontsize=10)

        ax1.set_ylabel(variable1, fontsize=10)

        ax1.set_xlim(t_min, t_max)
        ax1.set_ylim(y_min, y_max)

        # Depth is conventionally plotted increasing downwards.
        if variable1 == 'depth':
            ax1.invert_yaxis()

    else:
        ax1.scatter(v1_data,
                    v2_data,
                    marker='o',
                    s=9,
                    c=c_val,
                    edgecolor='none')
        ax1.set_xlabel(variable1, fontsize=10)
        ax1.set_ylabel(variable2, fontsize=10)

        ax1.set_xlim(x_min, x_max)
        ax1.set_ylim(y_min, y_max)

        if variable2 == 'depth':
            ax1.invert_yaxis()

    png_name = obj1.figure_path + '/' + variable1 + '_scatter.png'
    eps_name = obj1.figure_path + '/' + variable1 + '_scatter.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #16
0
def ecd_rate_plot(obj1,
                  plot_type=None,
                  t_inc=None,
                  t_lims=None,
                  lon_lims=None,
                  lat_lims=None,
                  z_lims=None,
                  Mc=None,
                  Name=None,
                  Save=None):
    """
    Plot the rate and cumulative number of ecd events with time
    
    Args:
        obj1: a varpy object containing ecd data
        plot_type: rate, cumulative or both (None)
        t_inc: the time increment over which to count event rates, default = 1
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits        
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off
        Name: optional label used in the saved file name (default: timestamp)
        Save: if not None, save png/eps copies and record them on obj1.figures
    
    Returns:
        fig1: the resulting plot
    """
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    dt_data = data[:, header.index('datetime')]

    if t_lims is None:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    if t_inc is None:
        t_inc = 1.0

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    if plot_type == 'rate' or plot_type is None:

        day_bins = arange(t_min, t_max, t_inc)
        DER, DER_bes = histogram(dt_data, day_bins)

        # Values above ~693500 are presumably matplotlib date numbers -- TODO confirm
        if dt_data[0] > 693500:
            ax1.bar(mdates.num2date(day_bins[:-1]),
                    DER,
                    width=t_inc,
                    color='darkslategrey',
                    edgecolor='darkslategrey')
            ax1.set_xlabel('Date', fontsize=8)
        else:
            ax1.bar(day_bins[:-1],
                    DER,
                    width=t_inc,
                    color='darkslategrey',
                    edgecolor='darkslategrey')
            ax1.set_xlabel('Day', fontsize=8)

        ax1.set_ylabel('Daily number of earthquakes', fontsize=8)

    if plot_type == 'cumulative' or plot_type is None:
        # Share the x-axis; use a twin y-axis when both panels are drawn.
        if plot_type is None:
            ax2 = ax1.twinx()
        else:
            ax2 = ax1

        if dt_data[0] > 693500:
            ax2.plot(mdates.num2date(dt_data), arange(len(dt_data)) + 1, 'k')
        else:
            ax2.plot(dt_data, arange(len(dt_data)) + 1, 'k')

        ax2.set_ylabel('Total earthquakes', fontsize=8)
        ax1.xaxis.set_ticks_position('bottom')

    # Bug fix: getattr with no default raises AttributeError when the object
    # has no 'evd' attribute (e.g. laboratory data); supply a None default.
    if getattr(obj1, 'evd', None) is not None:
        # Mark eruption start times as vertical dashed lines.
        eruption_starts = atleast_2d(
            obj1.evd.dataset)[:, obj1.evd.header.index('start_datetime')]
        for eruption_start in eruption_starts:
            if dt_data[0] > 693500:
                ax1.axvline(mdates.num2date(eruption_start),
                            color='red',
                            linestyle='--')
            else:
                ax1.axvline(eruption_start, color='red', linestyle='--')

    ax1.set_xlim(t_min, t_max)

    if Save is not None:
        if Name is None:
            timestamp = str(time.time())
        else:
            timestamp = Name
        png_name = obj1.figure_path + '/ecd_rate_plot-' + timestamp + '.png'
        eps_name = obj1.figure_path + '/ecd_rate_plot-' + timestamp + '.eps'

        if 'ecd_rate_plot' not in obj1.figures.keys():
            obj1.figures['ecd_rate_plot'] = []
        obj1.figures['ecd_rate_plot'].append(png_name)
        obj1.figures['ecd_rate_plot'].append(eps_name)
        plt.savefig(png_name)
        plt.savefig(eps_name)

    return fig1
예제 #17
0
#4. Completeness-magnitude estimation method used when filtering by magnitude.
mag_comp = 'GFT' #method for magnitude filtering. 'None' is no filtering

#5. Name of the model to apply at each forecast time.
model = 'iol_mle' #name of model to apply. Could do many???

#Setup Varpy object (ID and core are defined earlier in the script)
d1 = core.Volcanic(ID)

#Add data to Varpy object
d1.add_datatype('ecvd',ecvd_data_file, ecvd_metadata_file)

#Set-up times to run model: accept datetime strings or plain numbers.
try:
    t_min=conversion.date2int(t_min)
    t_max=conversion.date2int(t_max)
except:
    t_min = float(t_min)
    t_max = float(t_max)
    pass

#NOTE(review): linspace treats t_step as a sample count, not a step size;
#confirm whether arange(t_min, t_max, t_step) was intended.
times = linspace(t_min, t_max, t_step)

#Run forecasts in a loop... (Python 2 print statements)
for t_forc in times:
    print "iter"
    print t_forc
    #Maybe update object d1 here????
    #1. Apply spatial filter
    #May need a "None" option here
예제 #18
0
def plot_map(obj1, boundary=None, colour=None, t_lims=None, lon_lims=None, lat_lims=None, z_lims=None, Mc=None):
    """
    Plot a map of ecvd locations
    
    Args:
        obj1: a varpy object containing ecvd data
        colour: variable to define colour scale (e.g. 'depth')
        boundary: a list of lon_min, lon_max, lat_min, lat_max
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits        
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off
        
    Returns:
        fig1: a png image of the resulting plot
    """
    data = obj1.ecvd.dataset
    header = obj1.ecvd.header

    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                                data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                                data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    # Idiom fix: compare to None with 'is', not '=='.
    if colour is None:
        c_val = 'blue'
    else:
        c_val = data[:, header.index(colour)]

    lats = data[:, header.index('latitude')]
    lons = data[:, header.index('longitude')]

    if boundary is None:
        lon_min = lons.min()
        lon_max = lons.max()
        lat_min = lats.min()
        lat_max = lats.max()
    else:
        lon_min = boundary[0]
        lon_max = boundary[1]
        lat_min = boundary[2]
        lat_max = boundary[3]

    # NOTE(review): lon_0/lat_0 look like they should be the midpoints
    # ((min+max)/2) but are computed as max + (max-min)/2 -- confirm intended.
    m = Basemap(llcrnrlon=lon_min, llcrnrlat=lat_min, urcrnrlon=lon_max,
                urcrnrlat=lat_max, resolution='h', projection='tmerc',
                lon_0=lon_max + (lon_max - lon_min) / 2.,
                lat_0=lat_max + (lat_max - lat_min) / 2.)

    m.drawcoastlines()
    x, y = m(lons, lats)

    m.scatter(x, y, s=10, c=c_val, marker='o', edgecolor='none')

    # Grow the graticule spacing by factors of 10 until there are at most
    # 10 parallels and 10 meridians.
    delta = 0.001
    parallels = arange(around(lat_min, 1), around(lat_max, 1), delta)
    meridians = arange(around(lon_min, 1), around(lon_max, 1), delta)

    while logical_or(len(parallels) > 10, len(meridians) > 10):
        delta = delta * 10
        parallels = arange(around(lat_min, 1), around(lat_max, 1), delta)
        meridians = arange(around(lon_min, 1), around(lon_max, 1), delta)

    m.drawparallels(parallels, labels=[1, 0, 0, 1])
    m.drawmeridians(meridians, labels=[1, 0, 0, 1])

    m.drawmapboundary(fill_color='lightgrey')

    png_name = obj1.figure_path + '/map.png'
    eps_name = obj1.figure_path + '/map.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
#
예제 #19
0
def rate_histogram(obj1,
                   model=None,
                   interval=None,
                   t_lims=None,
                   lon_lims=None,
                   lat_lims=None,
                   z_lims=None,
                   Mc=None):
    """
    Plot a histogram of earthquake rates
    
    Args:
        obj1: a varpy object containing event catalogue data
        model: option to fit and bootstrap CoIs for model. Existing options: Poisson
        interval: bin width (default is daily)
        t_lims: [t_min, t_max] defining time axis limits
        lon_lims: [lon_min, lon_max] defining x-axis limits        
        lat_lims: [lat_min, lat_max] defining y-axis limits
        z_lims: [z_min, z_max] defining depth range
        Mc: magnitude cut-off
    
    Returns:
        fig1: a png image of the resulting plot

    Raises:
        ValueError: if `model` is given but not a supported option.
    """
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    dt_data = data[:, header.index('datetime')]

    if t_lims is None:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    if interval is not None:
        bin_width = interval
    else:
        bin_width = 1.

    # Count events per time bin, then histogram those per-bin counts.
    der_bins = arange(t_min, t_max + bin_width, bin_width)
    ders, der_bes = histogram(dt_data, der_bins)

    rate_bins = arange(-0.5, ders.max() + 1.5)
    mid_rate_bins = rate_bins[:-1] + diff(rate_bins) / 2.
    rate_freqs, rate_bes = histogram(ders, rate_bins)

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    ax1.bar(mid_rate_bins,
            rate_freqs,
            color='grey',
            edgecolor='darkgrey',
            align='center')

    if model is not None:
        # Bug fix: the original compared strings with 'is' (identity) and
        # left model_sim unbound for any other option, crashing with a
        # NameError; fail explicitly instead.
        if model != 'Poisson':
            raise ValueError('unsupported model: %s' % model)

        der_mean = mean(ders)

        # Bootstrap 95% COIs by simulating Poisson catalogues.
        rate_bstps = 1000
        rates_bstps = zeros((len(rate_bins) - 1, rate_bstps))

        for j in range(rate_bstps):
            model_sim = poisson.rvs(der_mean, size=len(ders))
            rates_bstps[:, j], model_bes = histogram(model_sim, rate_bins)

        poisson_coi_95 = scoreatpercentile(rates_bstps.transpose(), 95, axis=0)
        poisson_coi_5 = scoreatpercentile(rates_bstps.transpose(), 5, axis=0)

        ax1.plot(
            mid_rate_bins,
            poisson.pmf(mid_rate_bins, der_mean) * diff(rate_bins) * len(ders),
            '-or')
        ax1.plot(mid_rate_bins, poisson_coi_95, 'r:')
        ax1.plot(mid_rate_bins, poisson_coi_5, 'r:')

    ax1.set_xlabel('Rate', fontsize=8)
    ax1.set_ylabel('Frequency', fontsize=8)

    ax1.xaxis.set_ticks_position('bottom')

    png_name = obj1.figure_path + '/rate_histogram.png'
    eps_name = obj1.figure_path + '/rate_histogram.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #20
0
#3. Time variables
t_step = 1.0  #time between model applications, in days (for ecvd, maybe minutes for ecld?). If None means single application

#4. Completeness-magnitude estimation method used when filtering by magnitude.
mag_comp = 'GFT'  #method for magnitude filtering. 'None' is no filtering

#5. Name of the model to apply at each forecast time.
model = 'iol_mle'  #name of model to apply.

#Setup Varpy object (ID and core are defined earlier in the script)
d1 = core.Volcanic(ID)
#Add data to Varpy object
d1.add_datatype('ecvd', ecvd_data_file, ecvd_metadata_file)

#time limits have not been specified, therefore we are in prospective mode:
#forecast from today over the next 100 days.
t_min = conversion.date2int(date.today().strftime("%d-%m-%Y"))
t_max_a = date.today() + timedelta(days=100)
t_max = conversion.date2int(t_max_a.strftime("%d-%m-%Y"))
#NOTE(review): linspace treats t_step as a sample count, not a step size;
#confirm whether arange(t_min, t_max, t_step) was intended.
times = linspace(t_min, t_max, t_step)

#Run forecasts in a loop...
for t_forc in times:
    #Maybe update object d1 here????
    #1. Apply spatial filter
    #May need a "None" option here
    d2 = window.latlon(d1, lat_min, lat_max, lon_min, lon_max)
    #2. Apply depth filter
    #May need a "None" option here
    d3 = window.single_attribute(d2, 'depth', z_min, z_max, 'ecvd')

    #3. Select time window
예제 #21
0
def model_error_plot(obj1, data_type, model_name, t_lims=None, percentiles=None, n_bs=None):
    """
    Plot binned event rates with fitted model rate curves and bootstrap
    confidence intervals.

    Args:
        obj1: a varpy object containing event catalogue data and fitted models
        data_type: 'ecvd' or 'ecld' attribute holding the catalogue
        model_name: name of the model (currently unused; all models are plotted)
        t_lims: [t_min, t_max] time axis limits (numbers or datetime strings)
        percentiles: confidence percentiles for the bootstrap
        n_bs: number of bootstrap samples

    Returns:
        None. Saves model_plot.png and model_plot.eps to obj1.figure_path.
    """
    # Bug fix: the original condition `data_type is 'ecvd' or 'ecld'` was
    # always truthy; use a membership test and exit early for unsupported
    # types (also avoids NameErrors on ax1/t_min below).
    if data_type not in ('ecvd', 'ecld'):
        #Alternative for SCVD, SCLD data?
        print('SCLD and SCVD data not currently supported')
        return

    # Bug fix: the catalogue is needed regardless of t_lims (dt_data was
    # previously unbound when t_lims was supplied, crashing at histogram()).
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    dt_data = data[:, header.index('datetime')]

    #1. Determine t_min and t_max - check these aren't stored in model information...
    if t_lims is not None:
        # Accept either datetime strings or plain numbers for the limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
    else:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    t_inc = (t_max - t_min) / 11.

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    rate_bins = linspace(t_min, t_max, 11)
    mid_bins = rate_bins[:-1] + diff(rate_bins) / 2.
    DER, DER_bes = histogram(dt_data, rate_bins)

    # Values above ~693500 are presumably matplotlib date numbers -- TODO confirm
    if dt_data[0] > 693500:
        ax1.plot(mdates.num2date(mid_bins), DER, 'o')
        ax1.set_xlabel('Date', fontsize=8)
    else:
        ax1.plot(mid_bins, DER, 'o')
        ax1.set_xlabel('Day', fontsize=8)

    ax1.set_ylabel('rate of earthquakes', fontsize=8)

    #4. Create series of times between t_min, t_max to evaluate model rate, totals
    times = linspace(t_min, t_max, 500)

    for model in getattr(obj1, data_type).models.keys():

        m0 = getattr(obj1, data_type).models[model].outputs

        for m1 in m0:
            #For each model output, get its fitted parameters
            params = m1.dataset

            rate_func = m1.metadata['rate_func']
            rates = getattr(rate_funcs, rate_func[0])(times - t_min, params)
            # Scale the modelled rate to the bin width used for the data.
            ax1.plot(times, rates * t_inc, '-r')

            #Bootstrap confidence limits
            coi_l, coi_u = model_stats.model_CoIs(rate_func, params,
                                                  rate_bins, percentiles,
                                                  n_bs)

            ax1.plot(mid_bins, coi_l, 'r:')
            ax1.plot(mid_bins, coi_u, 'r:')

    ax1.set_xlim(t_min, t_max)

    png_name = obj1.figure_path + '/model_plot.png'
    eps_name = obj1.figure_path + '/model_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
#
예제 #22
0
def mag_mc_plot(obj1,
                colour=None,
                y_lims=None,
                t_lims=None,
                lon_lims=None,
                lat_lims=None,
                z_lims=None):
    """
    Plot the evolution of magnitudes with time, and show completeness
    magnitudes estimated by three methods (maximum curvature, GFT, MBS).

    Args:
        obj1: a varpy object; its ecvd (volcanic) or ecld dataset is plotted
        colour: name of the header variable used to define the colour scale
        y_lims: [y_min, y_max] defining y-axis (magnitude) limits
        t_lims: [t_min, t_max] defining time axis limits (datetime strings
            or numeric day values)
        lon_lims: [lon_min, lon_max] longitude filter
        lat_lims: [lat_min, lat_max] latitude filter
        z_lims: [z_min, z_max] depth filter

    Returns:
        None; saves png and eps images of the plot under obj1.figure_path

    Raises:
        ValueError: if no events remain after applying the filters
    """
    # Volcanic objects keep the catalogue in ecvd, others in ecld.
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    if t_lims is not None:
        # Accept datetime strings (converted to ordinal numbers) or plain
        # numeric day values as limits.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    # Fail with a clear message rather than an opaque IndexError below.
    if len(data) == 0:
        raise ValueError('No events remain after applying the filters')

    dt_data = data[:, header.index('datetime')]
    mag_data = data[:, header.index('magnitude')]

    if t_lims is None:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    if y_lims is None:
        y_lims = [mag_data.min(), mag_data.max()]

    c_val = 'blue'  # default: single uniform colour
    if colour is not None:
        cvar_column = header.index(colour)
        c_val = data[:, cvar_column]

    # Completeness magnitude estimates (0.1 bin width, 0.1 increment).
    GFT = mags.mc_GFT(mag_data, 0.1, 0.1)
    MBS = mags.mc_mbs(mag_data, 0.1, 0.1)

    mc_maxc = mags.mc_maxc(mag_data, 0.1, 0.1)
    mc_GFT = GFT.Mc_GFT
    mc_mbs = MBS.Mc_mbs

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    # Values above 693500 are presumably matplotlib-style date ordinals
    # (~year 1900 onwards); smaller values are treated as relative days.
    if dt_data[0] > 693500:
        ax1.scatter(mdates.num2date(dt_data),
                    mag_data,
                    marker='o',
                    s=9,
                    c=c_val,
                    edgecolor='none')
        ax1.set_xlabel('Date', fontsize=8)
    else:
        # Bug fix: use c_val here as well so the colour option is honoured
        # for relative-day data (was hard-coded to 'blue').
        ax1.scatter(dt_data,
                    mag_data,
                    marker='o',
                    s=9,
                    c=c_val,
                    edgecolor='none')
        ax1.set_xlabel('Day', fontsize=8)

    ax1.set_ylabel('Magnitude', fontsize=8)
    ax1.xaxis.set_ticks_position('bottom')
    ax1.set_xlim(t_min, t_max)
    ax1.set_ylim(y_lims[0], y_lims[1])

    # Horizontal reference lines for the three completeness estimates.
    ax1.axhline(mc_maxc, color='red', ls='--')
    ax1.axhline(mc_GFT, color='green', ls='--')
    ax1.axhline(mc_mbs, color='blue', ls='--')

    png_name = obj1.figure_path + '/M_time_plot.png'
    eps_name = obj1.figure_path + '/M_time_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #23
0
def mag_mc_plot(obj1, colour=None, y_lims=None, t_lims=None, lon_lims=None, lat_lims=None, z_lims=None):
    """
    Plot the evolution of magnitudes with time, and show completeness
    magnitudes estimated by three methods (maximum curvature, GFT, MBS).

    Args:
        obj1: a varpy object; its ecvd (volcanic) or ecld dataset is plotted
        colour: name of the header variable used to define the colour scale
        y_lims: [y_min, y_max] defining y-axis (magnitude) limits
        t_lims: [t_min, t_max] defining time axis limits (datetime strings
            or numeric day values)
        lon_lims: [lon_min, lon_max] longitude filter
        lat_lims: [lat_min, lat_max] latitude filter
        z_lims: [z_min, z_max] depth filter

    Returns:
        None; saves png and eps images of the plot under obj1.figure_path

    Raises:
        ValueError: if no events remain after applying the filters
    """
    # Volcanic objects keep the catalogue in ecvd, others in ecld.
    if obj1.type == 'volcanic':
        data = obj1.ecvd.dataset
        header = obj1.ecvd.header
    else:
        data = obj1.ecld.dataset
        header = obj1.ecld.header

    dt_col = header.index('datetime')

    if t_lims is not None:
        # Limits may be datetime strings or plain numbers.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, dt_col] >= t_min, data[:, dt_col] < t_max), :]

    # Optional spatial filters: [min, max) on each coordinate.
    for name, lims in (('longitude', lon_lims), ('latitude', lat_lims), ('depth', z_lims)):
        if lims is not None:
            col = header.index(name)
            data = data[logical_and(data[:, col] >= lims[0], data[:, col] < lims[1]), :]

    # Fail with a clear message rather than an opaque IndexError below.
    if len(data) == 0:
        raise ValueError('No events remain after applying the filters')

    dt_data = data[:, dt_col]
    mag_data = data[:, header.index('magnitude')]

    if t_lims is None:
        t_min = floor(dt_data.min())
        t_max = ceil(dt_data.max())

    if y_lims is None:
        y_lims = [mag_data.min(), mag_data.max()]

    # Default single colour, or a per-event colour scale.
    c_val = 'blue' if colour is None else data[:, header.index(colour)]

    # Completeness magnitude estimates (0.1 bin width, 0.1 increment).
    GFT = mags.mc_GFT(mag_data, 0.1, 0.1)
    MBS = mags.mc_mbs(mag_data, 0.1, 0.1)

    mc_maxc = mags.mc_maxc(mag_data, 0.1, 0.1)
    mc_GFT = GFT.Mc_GFT
    mc_mbs = MBS.Mc_mbs

    fig1 = plt.figure(1, figsize=(8, 6))
    ax1 = fig1.add_subplot(111, axisbg='lightgrey')

    # Values above 693500 are presumably matplotlib-style date ordinals;
    # smaller values are treated as relative day numbers.
    if dt_data[0] > 693500:
        # Bug fix below also applies here: c_val honours the colour option.
        ax1.scatter(mdates.num2date(dt_data), mag_data, marker='o', s=9, c=c_val, edgecolor='none')
        ax1.set_xlabel('Date', fontsize=8)
    else:
        # Bug fix: was hard-coded c='blue', which ignored the colour option.
        ax1.scatter(dt_data, mag_data, marker='o', s=9, c=c_val, edgecolor='none')
        ax1.set_xlabel('Day', fontsize=8)

    ax1.set_ylabel('Magnitude', fontsize=8)
    ax1.xaxis.set_ticks_position('bottom')
    ax1.set_xlim(t_min, t_max)
    ax1.set_ylim(y_lims[0], y_lims[1])

    # Horizontal reference lines for the three completeness estimates.
    ax1.axhline(mc_maxc, color='red', ls='--')
    ax1.axhline(mc_GFT, color='green', ls='--')
    ax1.axhline(mc_mbs, color='blue', ls='--')

    png_name = obj1.figure_path + '/M_time_plot.png'
    eps_name = obj1.figure_path + '/M_time_plot.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)
예제 #24
0
def plot_map(obj1,
             boundary=None,
             colour=None,
             t_lims=None,
             lon_lims=None,
             lat_lims=None,
             z_lims=None,
             Mc=None):
    """
    Plot a map of ecvd event locations.

    Args:
        obj1: a varpy object containing ecvd data
        boundary: a list of [lon_min, lon_max, lat_min, lat_max] for the map
            extent; defaults to the extent of the (filtered) data
        colour: variable to define colour scale (e.g. 'depth')
        t_lims: [t_min, t_max] defining time axis limits (datetime strings
            or numeric day values)
        lon_lims: [lon_min, lon_max] longitude filter
        lat_lims: [lat_min, lat_max] latitude filter
        z_lims: [z_min, z_max] depth filter
        Mc: magnitude cut-off; events below it are dropped

    Returns:
        None; saves png and eps images of the map under obj1.figure_path
    """
    data = obj1.ecvd.dataset
    header = obj1.ecvd.header

    if t_lims is not None:
        # Limits may be datetime strings or plain numbers.
        try:
            t_min = conversion.date2int(t_lims[0])
            t_max = conversion.date2int(t_lims[1])
        except Exception:
            t_min = float(t_lims[0])
            t_max = float(t_lims[1])
        data = data[logical_and(data[:, header.index('datetime')] >= t_min,
                                data[:, header.index('datetime')] < t_max), :]

    if lon_lims is not None:
        data = data[
            logical_and(data[:, header.index('longitude')] >= lon_lims[0],
                        data[:, header.index('longitude')] < lon_lims[1]), :]

    if lat_lims is not None:
        data = data[
            logical_and(data[:, header.index('latitude')] >= lat_lims[0],
                        data[:, header.index('latitude')] < lat_lims[1]), :]

    if z_lims is not None:
        data = data[logical_and(data[:, header.index('depth')] >= z_lims[0],
                                data[:, header.index('depth')] < z_lims[1]), :]

    if Mc is not None:
        data = data[data[:, header.index('magnitude')] >= Mc, :]

    if colour is None:
        c_val = 'blue'
    else:
        cvar_column = header.index(colour)
        c_val = data[:, cvar_column]

    lats = data[:, header.index('latitude')]
    lons = data[:, header.index('longitude')]

    if boundary is None:
        lon_min = lons.min()
        lon_max = lons.max()
        lat_min = lats.min()
        lat_max = lats.max()
    else:
        lon_min = boundary[0]
        lon_max = boundary[1]
        lat_min = boundary[2]
        lat_max = boundary[3]

    # Bug fix: the projection centre was computed as max + range/2, which
    # lies outside the mapped region (and could push lat_0 past 90 deg).
    # Centre the transverse Mercator projection on the boundary midpoint.
    m = Basemap(llcrnrlon=lon_min,
                llcrnrlat=lat_min,
                urcrnrlon=lon_max,
                urcrnrlat=lat_max,
                resolution='h',
                projection='tmerc',
                lon_0=lon_min + (lon_max - lon_min) / 2.,
                lat_0=lat_min + (lat_max - lat_min) / 2.)

    m.drawcoastlines()
    x, y = m(lons, lats)

    m.scatter(x, y, s=10, c=c_val, marker='o', edgecolor='none')

    # Choose the coarsest grid spacing (powers of 10 from 0.001 deg) that
    # keeps at most 10 parallels and 10 meridians on the map.
    delta = 0.001
    while True:
        parallels = arange(around(lat_min, 1), around(lat_max, 1), delta)
        meridians = arange(around(lon_min, 1), around(lon_max, 1), delta)
        if len(parallels) <= 10 and len(meridians) <= 10:
            break
        delta = delta * 10

    m.drawparallels(parallels, labels=[1, 0, 0, 1])
    m.drawmeridians(meridians, labels=[1, 0, 0, 1])

    m.drawmapboundary(fill_color='lightgrey')
    #m.fillcontinents(color='grey',lake_color='aqua')
    #m.readshapefile('../Data/100m_contours', 'Contours')

    png_name = obj1.figure_path + '/map.png'
    eps_name = obj1.figure_path + '/map.eps'
    plt.savefig(png_name)
    plt.savefig(eps_name)