def get_data(region_id, start_date, end_date, data_from="request"):
    """Gets all the data needed in the plots and pickles it so that I don't need to do requests to make plots.

    :param region_id:       [int]    Region ID is an int as given i ForecastRegionKDV
    :param start_date:      [string] Start date. Data for this date is not included in requests from OData
    :param end_date:
    :param data_from:       [string] Default "request". Other options: "request and save" and "local storage"
    :return:
    """

    filename = "{3}dangerandproblemplot_id{0} {1}-{2}.pickle".format(region_id, start_date.strftime('%Y'), end_date.strftime('%y'), env.local_storage)

    if "request" in data_from:
        if end_date > dt.date(2014, 11, 1) and start_date > dt.date(2014, 11, 1):  # early years don't have this avalanche problem
            problems = gp.get_all_problems(region_id, start_date, end_date, add_danger_level=False)
        else:
            problems = []

        dangers = gd.get_all_dangers(region_id, start_date, end_date)

        if "request and save" in data_from:
            mp.pickle_anything([problems, dangers], filename)

    elif "local storage" in data_from:
        problems, dangers = mp.unpickle_anything(filename)

    else:
        print("rundagerandproblem.py -> get_data: unknown data handler.")
        problems = None
        dangers = None

    return problems, dangers
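A minimal usage sketch, assuming `env`, `gp`, `gd`, `mp` and `dt` (datetime) are imported as in the snippet above; the region id and the season dates below are illustrative only:

if __name__ == "__main__":
    region_id = 116                         # hypothetical region id
    start_date = dt.date(2015, 11, 30)      # start of a forecast season
    end_date = dt.date(2016, 6, 1)          # end of the same season
    # the first call requests and caches; the second reads the local pickle
    problems, dangers = get_data(region_id, start_date, end_date, data_from="request and save")
    problems, dangers = get_data(region_id, start_date, end_date, data_from="local storage")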
Example 2
def step_1_make_data_set(region_id, from_date, to_date):
    """Makes the dataset of all observed avalanche activity (inl signs and isingle avalanches obs) and mapps
    to forecasts for those days.

    :param region_id:   [int or list of ints]
    :param from_date:   [date]
    :param to_date:     [date]
    :return:
    """

    # get all data
    dangers = gd.get_all_dangers(region_id, from_date, to_date)
    avalanches = go.get_avalanche_activity(region_id, from_date, to_date)
    single_avalanches = go.get_avalanche(region_id, from_date, to_date)
    danger_signs = go.get_danger_sign(region_id, from_date, to_date)

    # List of only forecasts
    forecasted_dangers = []
    for d in dangers:
        if 'Forecast' in d.data_table and d.danger_level != 0:
            forecasted_dangers.append(d)

    # List of only valid activity observations
    observed_activity = []
    for a in avalanches:
        if 'Ikke gitt' not in a.EstimatedNumName:
            observed_activity.append(a)

    # list of relevant danger observations
    danger_sign_avalanches = []
    for ds in danger_signs:
        if 'Ferske skred' in ds.DangerSignName or 'Ingen faretegn observert' in ds.DangerSignName:
            danger_sign_avalanches.append(ds)

    # list of relevant single avalanches
    observed_avalanche = []
    for sa in single_avalanches:
        if 'Ikke gitt' not in sa.DestructiveSizeName:
            observed_avalanche.append(sa)

    # Make list of all regions pr date and append forecasts and observations.
    data_date_region = []
    for d in forecasted_dangers:
        danger_date = d.date
        print('{0}'.format(danger_date))
        danger_region_name = d.region_name

        data = DataOnDateInRegion(danger_date, danger_region_name)
        data.forecast.append(d)

        for a in observed_activity:
            aval_date = a.DtAvalancheTime.date()
            aval_region_name = a.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.avalanche_activity.append(a)

        for da in danger_sign_avalanches:
            aval_date = da.DtObsTime.date()
            aval_region_name = da.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.danger_sign.append(da)

        for oa in observed_avalanche:
            aval_date = oa.DtAvalancheTime.date()
            aval_region_name = oa.ForecastRegionName
            if aval_date == danger_date and aval_region_name == danger_region_name:
                data.avalanche.append(oa)

        data_date_region.append(data)

    # discard days and regions where no observations are present
    date_region = []
    for d in data_date_region:
        if d.avalanche_activity or d.danger_sign or d.avalanche:
            date_region.append(d)

    return date_region, forecasted_dangers
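The `DataOnDateInRegion` container is not shown in this snippet; a minimal sketch of what it must provide for the code above to work, inferred from the four `append` targets (the real class may carry more fields):

class DataOnDateInRegion:
    """One forecast region on one date, with the forecast and its observations."""

    def __init__(self, date, region_name):
        self.date = date
        self.region_name = region_name
        self.forecast = []            # forecasted danger levels
        self.avalanche_activity = []  # valid avalanche activity observations
        self.danger_sign = []         # relevant danger sign observations
        self.avalanche = []           # single avalanche observations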
Example 3
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.items():
        if 99 < k < 150 and v.IsActive is True:
            region_ids.append(v.ID)

    from_date = dt.date(2015, 11, 30)
    to_date = dt.date(2016, 6, 1)
    #to_date = dt.date.today()

    drift_nick = 'drift@svv'
    #drift_id = 237
    pickle_file_name = '{0}runelrappdata.pickle'.format(env.local_storage)
    output_file = '{0}elrappdata 2015-16.csv'.format(env.output_folder)

    if get_new:
        dangers = gd.get_all_dangers(region_ids, from_date, to_date)
        forecast_danger = []
        drift_svv_danger = []
        for d in dangers:
            if 'Varsel' in d.source:
                forecast_danger.append(d)
            if d.nick is not None:
                if drift_nick in d.nick:
                    drift_svv_danger.append(d)

        aval_indexes = gm.get_avalanche_index(
            from_date, to_date,
            region_ids=region_ids)  #, nick_names=drift_nick)
        drift_svv_index = []
        for i in aval_indexes:
            if drift_nick in i.observation.NickName:
                drift_svv_index.append(i)  # assumed: collect the indexes observed by drift@svv (snippet is cut off here)
Example 4
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.items():
        if 99 < k < 150 and v.IsActive is True:
            region_id.append(v.ID)

    ## The output
    plot_file_name = 'Danger level and danger sign 2015-16.png'

    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
        all_danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_id, geohazard_tid=10)
        mp.pickle_anything([all_danger_levels, all_danger_signs], pickle_file_name_1)
    else:
        # load data from pickle
        all_danger_levels, all_danger_signs = mp.unpickle_anything(pickle_file_name_1)

    if make_new:
        # for counting days with danger levels
        level_count = []
        data = {1:[], 2:[], 3:[], 4:[], 5:[]}
        for dl in all_danger_levels:
            if dl.source == 'Varsel' and dl.danger_level != 0:
                level_count.append(dl.danger_level)
                for ds in all_danger_signs:
                    if dl.date == ds.DtObsTime.date() and dl.region_name in ds.ForecastRegionName:
                        data[dl.danger_level].append(ds)  # assumed: group matching danger signs by danger level (snippet is cut off here)
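The example breaks off in the counting loop; a hedged sketch of how `level_count` and the `data` dict might be summarized afterwards, assuming the completion above (`Counter` is from the standard library):

from collections import Counter

days_per_level = Counter(level_count)   # forecast days per danger level
for level in sorted(data):
    print('danger level {0}: {1} days, {2} matching danger sign obs'.format(
        level, days_per_level.get(level, 0), len(data[level])))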