def get_2015_16_warnings(how_to_get_data='Get new and dont pickle', pickle_file_name=None):
    '''Gets all forecasted avalanche dangers for the 2015-16 season (2015-11-30 to 2016-05-31)
    for all active forecast regions.

    :param how_to_get_data:     [string] 'Get new and dont pickle', 'Get new and save pickle'
                                or 'Load pickle'
    :param pickle_file_name:    [string] Not needed if no pickles involved
    :return all_warnings:       [list of AvalancheDanger] sorted by date, or an error string
                                if how_to_get_data is not recognized
    '''

    if 'Get new' in how_to_get_data:

        from_date = dt.date(2015, 11, 30)
        to_date = dt.date(2016, 5, 31)

        # all active regions; e.g. [117, 128] is Trollheimen
        region_ids = gm.get_active_forecast_regions()

        all_warnings = []
        for region_id in region_ids:
            all_warnings += gd.get_forecasted_dangers(region_id, from_date, to_date, include_problems=True, include_ikke_vurdert=False)

        # Sort by date
        all_warnings = sorted(all_warnings, key=lambda w: w.date)

        if 'and save pickle' in how_to_get_data:
            mp.pickle_anything(all_warnings, pickle_file_name)

    elif 'Load pickle' in how_to_get_data:
        # Bug fix: previously unpickled from the undefined name 'pickle_warnings_file_name',
        # which raised NameError whenever this branch was taken.
        all_warnings = mp.unpickle_anything(pickle_file_name)

    else:
        all_warnings = 'No valid data retrival method given in get_2015_16_warnings.'

    return all_warnings
def make_2015_16_plots():
    """Plots both observations pr observer and pr region for display on webpage for the season 2015-16.
    Method includes a request for list of relevant observers.

    :return:
    """

    # get a list of relevant observers to plot and pickle it in the web-folder
    observer_list = gm.get_observer_dict_for_2015_16_ploting()
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # build the list of months to plot: the 1st of every month from Nov 2015 until today
    months = []
    first_of_month = dt.date(2015, 11, 1)
    while first_of_month < dt.date.today():
        months.append(first_of_month)
        # 35 days always lands somewhere in the next month; snap back to its 1st
        somewhere_next_month = first_of_month + dt.timedelta(days=35)
        first_of_month = dt.date(somewhere_next_month.year, somewhere_next_month.month, 1)

    # all active forecast regions (IDs in the 101-149 range)
    region_ids = []
    for region_tid, region_kdve in gkdv.get_kdv('ForecastRegionKDV').iteritems():
        if 100 < region_tid < 150 and region_kdve.IsActive is True:
            region_ids.append(region_kdve.ID)

    make_observer_plots(observer_list, months)
    make_region_plots(region_ids, months)

    return
Example #3
0
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    '''All forecasted warnings and problems are selected from regObs or the avalanche api.
    Dangers and problems are connected and neatly pickel'd for later use.

    :param regions:            list [int] RegionID as given in regObs [101-199]
    :param date_from:          string as 'yyyy-mm-dd'
    :param date_to:            string as 'yyyy-mm-dd'
    :param pickle_file_name:   filename including directory as string
    :return:
    '''

    warnings = []

    for region_id in regions:

        # get all warnings and problems for this region, then join problems onto warnings where dates match
        region_warnings = gfa.get_warnings(region_id, date_from, date_to)
        region_name = gro.get_forecast_region_name(region_id)
        problems = gro.get_problems_from_AvalancheWarnProblemV(region_id, date_from, date_to)

        print('runmatrix.py -> pickle_warnings: {0} problems found for {1}'.format(len(problems), region_name))

        for warning in region_warnings:
            for problem in problems:
                if warning.date == problem.date:
                    warning.add_problem(problem)

        warnings += region_warnings

    # make sure all problems are ordered from lowest id (main problem) to largest.
    for warning in warnings:
        warning.avalanche_problems = sorted(warning.avalanche_problems, key=lambda p: p.order)

    mp.pickle_anything(warnings, pickle_file_name)
def get_data(region_id, start_date, end_date, data_from="request"):
    """Gets all the data needed in the plots and pickles it so that I don't need to do requests to make plots.

    :param region_id:       [int]    Region ID is an int as given i ForecastRegionKDV
    :param start_date:      [date]   Start date. Data for this date is not included in requests from OData
    :param end_date:        [date]   End date
    :param data_from:       [string] Default "request". Other options: "request and save" and "local storage"
    :return problems, dangers:
    """

    filename = "{3}dangerandproblemplot_id{0} {1}-{2}.pickle".format(
        region_id, start_date.strftime('%Y'), end_date.strftime('%y'), env.local_storage)

    if "request" in data_from:
        # Early years dont have this avalanche problem
        season_start = dt.date(2014, 11, 1)
        if end_date > season_start and start_date > season_start:
            problems = gp.get_all_problems(region_id, start_date, end_date, add_danger_level=False)
        else:
            problems = []

        dangers = gd.get_all_dangers(region_id, start_date, end_date)

        if "request and save" in data_from:
            mp.pickle_anything([problems, dangers], filename)

    elif "local storage" in data_from:
        problems, dangers = mp.unpickle_anything(filename)

    else:
        print("rundagerandproblem.py -> get_data: unknown data handler.")
        problems = None
        dangers = None

    return problems, dangers
Example #5
0
def pickle_data_set(warnings, file_name, use_ikke_gitt=False):
    '''Data preparation continued. Takes the warnings, a list of AvalancheDanger objects, and makes
    a dictionary data set of it. The value indexes relate to each other. I.e. distribution, level,
    probability etc. at the nth index originate from the same problem.

    The data set also includes information on what the xKDV tables in regObs contain and preferred
    colors when plotting.

    :param warnings:        list of AvalancheDanger objects
    :param file_name:       full path and filename to pickle the data to
    :param use_ikke_gitt:   If the ID = 0 (Ikke gitt) values are not wanted they can be omitted all in all.

    :return:
    '''

    level_list = []
    size_list = []
    trigger_list = []
    probability_list = []
    distribution_list = []

    for w in warnings:
        if w.danger_level > 0 and len(w.avalanche_problems) > 0:
            # The first problem in avalanche_problems is used. This is the main problem.
            level_list.append(w.danger_level)
            size_list.append(w.avalanche_problems[0].aval_size)
            trigger_list.append(w.avalanche_problems[0].aval_trigger)
            probability_list.append(w.avalanche_problems[0].aval_probability)
            distribution_list.append(w.avalanche_problems[0].aval_distribution)

    # Test if lengths match and give warning if not. Bug fix: this check previously ran once per
    # warning inside the loop above (accidentally quadratic); checking once after the lists are
    # fully built is sufficient.
    control = (len(level_list) + len(size_list) + len(trigger_list) + len(probability_list) + len(distribution_list))/5
    if not control == len(level_list):
        print("runForMatrix -> pickle_data_set: list-lenghts dont match. Error in data.")

    # axis keys from the regObs xKDV tables
    level_keys = gkdv.get_kdv('AvalancheDangerKDV').keys()
    size_keys = [v.Name for v in gkdv.get_kdv('DestructiveSizeKDV').values()]
    triggers_keys = [v.Name for v in gkdv.get_kdv('AvalTriggerSimpleKDV').values()]
    probability_keys = [v.Name for v in gkdv.get_kdv('AvalProbabilityKDV').values()]
    distribution_keys = [v.Name for v in gkdv.get_kdv('AvalPropagationKDV').values()]

    # one color pr danger level (grey, green, yellow, orange, red, black)
    level_colors = ['0.5', '#ccff66', '#ffff00', '#ff9900', '#ff0000', 'k']

    if use_ikke_gitt == False:
        # drop the ID = 0 (Ikke gitt) entry from every axis
        level_keys.pop(0)
        size_keys.pop(0)
        triggers_keys.pop(0)
        probability_keys.pop(0)
        distribution_keys.pop(0)

        level_colors.pop(0)

    data_set = {'level': {'values': level_list, 'keys': level_keys, 'colors': level_colors},
                'size': {'values': size_list, 'keys': size_keys, 'colors': ['0.7']},
                'trigger': {'values': trigger_list, 'keys': triggers_keys, 'colors': ['0.7']},
                'probability': {'values': probability_list, 'keys': probability_keys, 'colors': ['0.7']},
                'distribution': {'values': distribution_list, 'keys': distribution_keys, 'colors': ['0.7']}}

    mp.pickle_anything(data_set, file_name)
def make_2015_16_plots(run_all=False):
    """Plots both observations pr observer and pr region for display on web page for the season 2015-16.
    Method includes a request for list of relevant observers.

    :param run_all: [bool] plot every month since Nov 2015 if True, else only the most recent one(s)
    :return:
    """

    # list of months to plot: the 1st of every month from Nov 2015 until today
    all_months = []
    this_month = dt.date(2015, 11, 1)
    while this_month < dt.date.today():
        all_months.append(this_month)
        # 35 days always lands in the next month; snap back to its 1st
        day_in_next_month = this_month + dt.timedelta(days=35)
        this_month = dt.date(day_in_next_month.year, day_in_next_month.month, 1)

    # if not specified run only the last month (plus the previous one early in a new month)
    if not run_all:
        if dt.date.today().day < 5 and len(all_months) > 1:
            last_months = all_months[-2:]
        else:
            last_months = [all_months[-1]]
    else:
        last_months = all_months

    # get a list of relevant observers to plot and pickle it in the web-folder
    previous_observer_list = mp.unpickle_anything('{0}observerlist.pickle'.format(env.web_root_folder))
    observer_list = gm.get_observer_dict_for_2015_16_ploting()
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # observers not seen in the previous list get all months plotted; the rest only the last months
    new_observers = {}
    for observer_id, observer in observer_list.iteritems():
        if observer_id not in previous_observer_list.keys():
            new_observers[observer_id] = observer

    # Get all active regions (IDs in the 101-149 range)
    region_ids = []
    for region_tid, region_kdve in gkdv.get_kdv('ForecastRegionKDV').iteritems():
        if 100 < region_tid < 150 and region_kdve.IsActive is True:
            region_ids.append(region_kdve.ID)

    # run the stuff
    make_observer_plots(new_observers, all_months)
    make_observer_plots(previous_observer_list, last_months)
    make_region_plots(region_ids, last_months)

    return
Example #7
0
def pickle_warnings(regions, date_from, date_to, pickle_file_name):
    """
    All warnings and problems are selected from regObs or the avalanche api and neatly pickel'd for later use.
    This method also gets all warnings in english for the english main message.

    :param regions:            list [int] region IDs
    :param date_from:          string as 'yyyy-mm-dd'
    :param date_to:            string as 'yyyy-mm-dd'
    :param pickle_file_name:   filename including directory as string

    :return:
    """

    warnings = []

    # get all warning and problems for this region and then loop though them joining them on date
    for r in regions:
        warnings_no = gfa.get_warnings(r, date_from, date_to)
        warnings_en = gfa.get_warnings(r, date_from, date_to, lang_key=2)
        name = gro.get_forecast_region_name(r)
        problems = gro.get_problems_from_AvalancheWarnProblemV(name, r, date_from, date_to)

        print("{0} problems found for {1}".format(len(problems), name))

        # loop trough all the norwegian forecasts
        for i in range(0, len(warnings_no), 1):

            # add problems with same dates
            for j in range(0, len(problems), 1):

                if warnings_no[i].date == problems[j].date:
                    warnings_no[i].add_problem(problems[j])

            # add english main message with same dates
            for k in range(0, len(warnings_en), 1):

                if warnings_no[i].date == warnings_en[k].date:
                    warnings_no[i].set_main_message_en(warnings_en[k].main_message_en)
                    # Bug fix: was 'continue', which kept scanning the remaining english
                    # warnings after the matching date was already found.
                    break

        warnings = warnings + warnings_no

    mp.pickle_anything(warnings, pickle_file_name)
Example #8
0
def pickle_M3(data_set, config_file_name, pickle_m3_file_name):
    """Makes a list of elements matching the m3 matrix. Uses a configuration file for the matrix elements
    and runs through all warnings adding occurances and danger level used at each combination of the matrix.

    :param data_set:            [dict] data set as pickled by pickle_data_set
    :param config_file_name:    [string] configuration file name, resolved inside env.input_folder
    :param pickle_m3_file_name: [string] full path to pickle the m3 elements to
    :return:
    """

    config_file_name = '{0}{1}'.format(env.input_folder, config_file_name)
    m3_elements = rf.read_configuration_file(config_file_name, M3Element)

    # read out the data_set and add to M3Elements
    num_occurrences = len(data_set['level']['values'])
    for i in range(num_occurrences):

        size = data_set['size']['values'][i]
        if size is None:
            size = '0 - Ikke gitt'
            print('runmatrix.py -> picke_M3 -> Warning: Encountered occurrence where avalanche size is None. Set to 0 - Ikke gitt.')
        trigger = data_set['trigger']['values'][i]
        probability = data_set['probability']['values'][i]
        distribution = data_set['distribution']['values'][i]

        for element in m3_elements:
            # substring match of this occurrence against the matrix element's cell definition
            is_match = (size in element.avalanche_size
                        and trigger in element.trigger
                        and probability in element.probability
                        and distribution in element.distribution)
            if is_match:
                element.add_danger_level(data_set['level']['values'][i])

    # count all levels added (for debug only) for control and make some stats
    count = 0
    for element in m3_elements:
        count += len(element.danger_level_list)
        element.set_level_average()
        element.set_level_standard_dev()

    mp.pickle_anything(m3_elements, pickle_m3_file_name)
def get_incident_list(all_incidents, desired_damage_extent_kdv, pickle_file_name_2, make_new_incident_list):
    '''Each row in the incident list contains Incident and Forecast objects where
    date and forecast region match AND where incidents match the damage extent we wish to study.

    :param all_incidents:               [list] incidents, each with a .DamageExtentTID attribute
    :param desired_damage_extent_kdv:   [dict] keyed on the DamageExtentTIDs we wish to keep
    :param pickle_file_name_2:          [string] full path of the pickle used to cache the list
    :param make_new_incident_list:      [bool] if True, build the list fresh and pickle it; else load the pickle
    :return incident_list:              [list of IncidentAndForecasts]
    '''

    if make_new_incident_list:
        incident_list = []
        for incident in all_incidents:
            # keep only incidents with one of the damage extents we wish to study
            if incident.DamageExtentTID in desired_damage_extent_kdv.keys():
                # NOTE(review): 'all_forecasts' is neither a parameter nor a local - this relies
                # on a module-level name being defined at call time. Confirm, and consider
                # passing it in explicitly.
                incident_list.append(IncidentAndForecasts(incident, all_forecasts))
        mp.pickle_anything(incident_list, pickle_file_name_2)
    else:
        incident_list = mp.unpickle_anything(pickle_file_name_2)

    return incident_list
def get_data(from_date, to_date, region_ids, pickle_file_name_1, get_new):
    '''Timeconsuming and inefficient. Not proud..

    :param from_date:           [date] start of the period
    :param to_date:             [date] end of the period
    :param region_ids:          [list of int] forecast regions to request
    :param pickle_file_name_1:  [string] full path of the cache pickle
    :param get_new:             [bool] request fresh data (and cache it) or load the local pickle
    :return all_forecasts, all_incidents:
    '''
    if get_new:
        # request fresh data and cache it to pickle
        all_incidents = go.get_incident(from_date, to_date, region_ids=region_ids, geohazard_tid=10)
        all_forecasts = []
        for rid in region_ids:
            all_forecasts.extend(gd.get_forecasted_dangers(rid, from_date, to_date, include_problems=True))
        mp.pickle_anything([all_forecasts, all_incidents], pickle_file_name_1)
    else:
        # load cached data from pickle
        all_forecasts, all_incidents = mp.unpickle_anything(pickle_file_name_1)

    return all_forecasts, all_incidents
Example #11
0
def get_kdv(view):
    '''Imports a view from regObs and returns a dictionary with <key, value> = <ID, KDVelement>.
    The view is requested from the regObs api if the local pickle file is older than 3 days.

    :param view:    [string]    kdv view
    :return:        [OrderedDict] {ID: KDVelement} sorted on ID

    Ex of use: aval_cause_kdv = get_kdv('AvalCauseKDV')
    Ex of url for returning values for IceCoverKDV in norwegian:
    http://api.nve.no/hydrology/regobs/v0.9.4/OData.svc/ForecastRegionKDV?$filter=Langkey%20eq%201%20&$format=json
    '''

    kdv_file_name = '{0}{1}.pickle'.format(env.local_storage, view)
    kdv_elements = {}   # renamed from 'dict' to avoid shadowing the builtin

    if os.path.exists(kdv_file_name):

        # refresh the local pickle if it is older than max_file_age days
        max_file_age = 3
        file_date_seconds = os.path.getmtime(kdv_file_name)
        file_date_datetime = dt.datetime.fromtimestamp(file_date_seconds)
        file_date_limit = dt.datetime.now() - dt.timedelta(days=max_file_age)

        if file_date_datetime < file_date_limit:
            print("getkdvelements.py -> get_kdv: Removing KDV from local storage: {0}".format(kdv_file_name))
            os.remove(kdv_file_name)
            # The recursive call takes the request-branch below, which already pickles the
            # result - the redundant second pickle_anything call here was removed (bug fix:
            # the same dict was written to disk twice).
            ordered_dict = get_kdv(view)
        else:
            ordered_dict = mp.unpickle_anything(kdv_file_name, print_message=False)

    else:

        # 'LangKey' is cased differently in the TripTypeKDV view
        odata_filter = 'filter=Langkey%20eq%201'
        if 'TripTypeKDV' in view:
            odata_filter = 'filter=LangKey%20eq%201'

        url = 'http://api.nve.no/hydrology/regobs/{0}/OData.svc/{1}?${2}&$format=json'\
            .format(env.api_version, view, odata_filter)

        lang_key = 1

        print("getkdvelements.py -> get_kdv: Getting KDV from URL: {0}".format(url))
        kdv = requests.get(url).json()

        for a in kdv['d']['results']:
            try:
                sort_order = a["SortOrder"]
                is_active = a["IsActive"]

                if 'AvalCauseKDV' in url and 9 < int(a['ID']) < 26:      # this table gets special treatment
                    # Name and Description are deliberately swapped for these IDs
                    tid = int(a["ID"])
                    name = fe.remove_norwegian_letters(a["Description"])
                    description = fe.remove_norwegian_letters(a["Name"])
                elif 'TripTypeKDV' in view:
                    tid = int(a["TripTypeTID"])
                    name = fe.remove_norwegian_letters(a["Name"])
                    description = fe.remove_norwegian_letters(a["Descr"])
                else:
                    tid = int(a["ID"])
                    name = fe.remove_norwegian_letters(a["Name"])
                    description = fe.remove_norwegian_letters(a["Description"])

                kdv_elements[tid] = KDVelement(tid, sort_order, is_active, name, description, lang_key)

            except (RuntimeError, TypeError, NameError):
                pass

        ordered_dict = collections.OrderedDict(sorted(kdv_elements.items()))
        mp.pickle_anything(ordered_dict, kdv_file_name)

    return ordered_dict
def make_forecaster_data(warnings, save_for_web=False):
    '''Make the forecaster dictionary with all the neccesary data.
    Method also makes the dict needed for the menu on the pythonanywhere website.

    :param warnings:        [list] warning objects carrying the .nick of the forecaster who made them
    :param save_for_web:    [bool] if True, pickle a {observer_id: nick} dict for the website menu
    :return forecaster_dict: [dict] {nick: Forecaster} with statistics added
    '''

    # get nicknames and ids to all regObs users. Get {id:nick} to all forecasters.
    observer_nicks = gm.get_observer_v()

    # Make dataset with dict {nick: Forecaster}. Add warnings to Forecaster object.
    # Note: A list of all forecaster names is all the keys in this dictionary
    forecaster_dict = {}
    for w in warnings:
        if w.nick not in forecaster_dict:
            forecaster_dict[w.nick] = Forecaster(w.nick)
        # Both branches of the original if/else called add_warning(w); collapsed the duplication.
        forecaster_dict[w.nick].add_warning(w)

    # totals over all warnings, needed below for the per-forecaster statistics
    nowcast_lengths_all = []
    forecast_lengths_all = []
    danger_levels_all = []
    problems_pr_warning_all = []
    for w in warnings:
        nowcast_lengths_all.append(len(w.avalanche_nowcast))
        forecast_lengths_all.append(len(w.avalanche_forecast))
        danger_levels_all.append(w.danger_level)
        problems_pr_warning_all.append(len(w.avalanche_problems))

    # Add more data for forecaster objects in the dict
    for n, f in forecaster_dict.iteritems():

        # add # warnings made
        forecaster_dict[f.nick].add_warnings_count(len(f.warnings))
        for o_i, o_n in observer_nicks.iteritems():
            if o_n == f.nick:
                forecaster_dict[f.nick].add_observer_id(o_i)

        # find how many warnings pr date
        dates = {}
        for w in f.warnings:
            if w.date not in dates:
                dates[w.date] = 1
            else:
                dates[w.date] += 1
        forecaster_dict[f.nick].add_dates(dates)

        # Add lists of dangerlevels, nowcastlengths, forecast lengths and problems
        # for this forecaster, together with the all-forecaster totals for averages.
        nowcast_lengths = []
        forecast_lengths = []
        danger_levels = []
        problems_pr_warning = []

        for w in f.warnings:
            nowcast_lengths.append(len(w.avalanche_nowcast))
            forecast_lengths.append(len(w.avalanche_forecast))
            danger_levels.append(w.danger_level)
            problems_pr_warning.append(len(w.avalanche_problems))

        forecaster_dict[f.nick].add_nowcast_lengths(nowcast_lengths, nowcast_lengths_all)
        forecaster_dict[f.nick].add_forecast_lengths(forecast_lengths, forecast_lengths_all)
        forecaster_dict[f.nick].add_danger_levels(danger_levels, danger_levels_all)
        forecaster_dict[f.nick].add_problems_pr_warning(problems_pr_warning, problems_pr_warning_all)

    # Save dict of forecasters for the website menu. Find where nick match forecasters.
    if save_for_web:
        forecaster_nicknid_dict = {-1: '_OVERSIKT ALLE_'}
        for o_i, o_n in observer_nicks.iteritems():
            for f_n, f_F in forecaster_dict.iteritems():
                if o_n == f_n:
                    forecaster_nicknid_dict[o_i] = o_n
        mp.pickle_anything(forecaster_nicknid_dict, '{0}forecasterlist.pickle'.format(env.web_root_folder))

    return forecaster_dict
Example #13
0
    #to_date = dt.date.today()

    ### get and make the data set
    # date_region, forecasted_dangers = step_1_make_data_set(region_id, from_date, to_date)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    #
    # ## Find the observaton of highest value pr region pr date
    # date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    # date_region = step_2_find_most_valued(date_region)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    # #
    ## ready to add to count elements
    date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    elements = rf.read_configuration_file('{0}aval_dl_configuration.csv'.format(env.input_folder), ActivityAndDanger)
    elements = step_3_count_occurances(date_region, elements)
    mp.pickle_anything([date_region, forecasted_dangers, elements], '{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))

    ### ready to plot?
    date_region, forecasted_dangers, elements = mp.unpickle_anything('{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))
    step_4_plot(date_region, forecasted_dangers, elements, '{0}Avalanches and dangers {1} to {2}'.format(env.plot_folder, from_date, to_date))

    total_a = 0
    total_aa = 0
    total_ds = 0
    for d in date_region:
        total_a += len(d.avalanche)
        total_aa += len(d.avalanche_activity)
        total_ds += len(d.danger_sign)

    total_obs = total_a + total_aa + total_ds
Example #14
0
            if 'Varsel' in d.source:
                forecast_danger.append(d)
            if d.nick is not None:
                if drift_nick in d.nick:
                    drift_svv_danger.append(d)

        aval_indexes = gm.get_avalanche_index(
            from_date, to_date,
            region_ids=region_ids)  #, nick_names=drift_nick)
        drift_svv_index = []
        for i in aval_indexes:
            if drift_nick in i.observation.NickName:
                drift_svv_index.append(i)

        mp.pickle_anything(
            [forecast_danger, drift_svv_danger, drift_svv_index],
            pickle_file_name)

    else:
        forecast_danger, drift_svv_danger, drift_svv_index = mp.unpickle_anything(
            pickle_file_name)

    # order and group by date:
    elrapp_data_list = []
    for fd in forecast_danger:
        ed = ElrappData(fd.date, fd.region_name)
        ed.set_danger_forecast(fd.danger_level_name)
        elrapp_data_list.append(ed)

    for dd in drift_svv_danger:
        for ed in elrapp_data_list:
        if 99 < k < 150 and v.IsActive is True:
            region_id.append(v.ID)

    ## The output
    plot_file_name = 'Danger level and danger sign 2015-16.png'

    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
        all_danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_id, geohazard_tid=10)
        mp.pickle_anything([all_danger_levels, all_danger_signs], pickle_file_name_1)
    else:
        # load data from pickle
        all_danger_levels, all_danger_signs = mp.unpickle_anything(pickle_file_name_1)

    if make_new:
        # for counting days with danger levels
        level_count = []
        data = {1:[], 2:[], 3:[], 4:[], 5:[]}
        for dl in all_danger_levels:
            if dl.source == 'Varsel' and dl.danger_level is not 0:
                level_count.append(dl.danger_level)
                for ds in all_danger_signs:
                    if dl.date == ds.DtObsTime.date() and dl.region_name in ds.ForecastRegionName:
                        print '{0}'.format(dl.date)
                        data[dl.danger_level].append(fe.remove_norwegian_letters(ds.DangerSignName))
Example #16
0
    # list of all observerids and their nicks
    observer_nicks = get_observer_v()

    # only the whorthy are selected
    observers_dict_select = {}
    for k,v in observers_dict.iteritems():
        if v > 5:
            observers_dict_select[k] = observer_nicks[k]

    # order by nickname
    # sorted_observers_dict = sorted(observers_dict_select.items(), key=operator.itemgetter(1))

    return observers_dict_select


if __name__ == "__main__":

    # alternative date ranges kept from earlier runs:
    # from_date = dt.date(2015, 12, 1)
    # from_date = dt.date.today()-dt.timedelta(days=60)
    # to_date = dt.date.today()+dt.timedelta(days=1)

    # Request the observers relevant for the 2015-16 plots and pickle the dict to the web-folder.
    observer_list = get_observer_dict_for_2015_16_ploting()
    import makepickle as mp
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # other requests kept for reference:
    # observer_nicks = get_observer_v()
    # trips = get_trip(from_date, to_date, output='csv')
    # observers = get_observer_group_member(group_id=51, output='Dict')
    # registration = get_registration(from_date, to_date, geohazard_tid=10, ApplicationID="Web and app")

    # presumably a convenient last line for a debugger breakpoint
    a = 1
def step1_get_data(year, month, observer_id=None, region_id=None, get_new=True, make_pickle=False, data_description="no_description_given"):
    """Gets data for one month and prepares for plotting.

    :param year:                [int]
    :param month:               [int]
    :param observer_id:         [int or list of ints]
    :param region_id            [int]
    :param get_new:             [bool] get data with a new request or use local pickle
    :param make_pickle:         [bool] only matters if getting new data, make a pickle in local storage
    :param data_description     [string] Custom description for naming pickle-files
    :return dates:              [list of DayData objects]

    """

    # NOTE(review): with the non-None default above, the region/observer file-name branches
    # below are only reached when data_description=None is passed explicitly.
    if data_description is not None:
        pickle_file_name = "{0}{1}_{2}{3:02d}.pickle".format(env.local_storage, data_description, year, month)
    else:
        if region_id is not None:
            pickle_file_name = "{0}runPlotRegionData_{1}_{2}{3:02d}.pickle".format(env.local_storage, region_id, year, month)
        else:
            if observer_id is not None:
                pickle_file_name = "{0}runPlotObserverData_{1}_{2}{3:02d}.pickle".format(env.local_storage, observer_id, year, month)
            else:
                print 'Need Observerid and/or forecastRegionTID to make this work.'
                return []

    # calendar.monthrange returns (weekday of the 1st, number of days in the month)
    first, last  = cal.monthrange(year, month)
    from_date = dt.date(year, month, 1)
    to_date = dt.date(year, month, last) + dt.timedelta(days=1)     # the first day of the next month

    if get_new:
        all_observations = go.get_all_registrations(from_date, to_date, output='DataFrame', geohazard_tid=10,
                                                    observer_ids=observer_id, region_ids=region_id)

        # for all dates in the requested from-to interval
        dates = []
        for d in _get_dates(from_date, to_date, dt.timedelta(days=1)):

            if observer_id is not None and region_id is None:   # if only data for one observer
                dd = DayData(d, observer_id=observer_id)
            else:                                               # else wish to have data for some/all observers in a region
                dd = DayData(d, region_id=region_id)

            obstyp = []         # observation type names seen this date
            regids = []         # all RegIDs this date (with repeats - counts occurrences)
            nicks = []          # all observer nicknames this date (with repeats - counts occurrences)
            loc_pr_regid = {}   # {RegID: forecast region name}
            obs_pr_regid = {}   # {RegID: [observation type names]}
            nic_pr_regid = {}   # {RegID: nickname}

            # loop through all observations
            # NOTE(review): iterating .index and addressing rows with .iloc[i] assumes the
            # DataFrame has a default 0..n-1 integer index - confirm against go.get_all_registrations.
            for i in all_observations.index:
                this_date = all_observations.iloc[i].DtObsTime.date()

                # append all observations where dates match
                if this_date == d:

                    regid = all_observations.iloc[i].RegID

                    # location on regid (only one location pr RegID)
                    if regid not in loc_pr_regid.keys():
                        loc_pr_regid[regid] = all_observations.iloc[i].ForecastRegionName

                    # get the nickname used on the regid (only one pr RegID)
                    if regid not in nic_pr_regid.keys():
                        nic_pr_regid[regid] = all_observations.iloc[i].NickName

                    # observations pr regid (might be more)
                    if regid not in obs_pr_regid.keys():
                        obs_pr_regid[regid] = [all_observations.iloc[i].RegistrationName]
                    else:
                        obs_pr_regid[regid].append(all_observations.iloc[i].RegistrationName)

                    # list of all observations on this date; pictures of snow profiles are counted as profiles
                    if all_observations.iloc[i].RegistrationName == 'Bilde':
                        if all_observations.iloc[i].TypicalValue1 == 'Bilde av: Snoeprofil':
                            obstyp.append('Snoeprofil')
                        else:
                            obstyp.append(all_observations.iloc[i].RegistrationName)
                    else:
                        obstyp.append(all_observations.iloc[i].RegistrationName)

                    # list of all regids - this counts occurances
                    regids.append(int(all_observations.iloc[i].RegID))

                    # list of all observers nicknames - this counts occurances
                    nicks.append(all_observations.iloc[i].NickName)

            # add to object for plotting
            dd.add_loc_pr_regid(loc_pr_regid)
            dd.add_obs_pr_regid(obs_pr_regid)
            dd.add_nic_pr_regid(nic_pr_regid)
            dd.add_observations(obstyp)
            dd.add_regids(regids)
            dd.add_nicks(nicks)
            dates.append(dd)

        if make_pickle:
            mp.pickle_anything(dates, pickle_file_name)

    else:
        dates = mp.unpickle_anything(pickle_file_name)

    return dates
Example #18
0
    l.close()


if __name__ == "__main__":

    # This makes a full report

    data_output_filename = '{0}Alle skredproblemer.csv'.format(env.output_folder)
    pickle_file_name = '{0}runavalancheproblems.pickle'.format(env.local_storage)

    # Get all active regions (IDs in the 100-149 range).
    # Bug fix: a hard-coded test list [118, 128, 117] was assigned here and then
    # immediately overwritten - the dead assignment was removed.
    region_ids = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 99 < k < 150 and v.IsActive is True:
            region_ids.append(v.ID)

    from_date = dt.date(2012, 12, 31)
    to_date = dt.date(2015, 7, 1)

    # request, cache and reload the problems, then write the csv report
    data = gp.get_all_problems(region_ids, from_date, to_date)
    mp.pickle_anything(data, pickle_file_name)

    data = mp.unpickle_anything(pickle_file_name)
    save_problems_simple(data, data_output_filename)

    a = 1

Example #19
0
            m.occurrences, danger_levels, main_causes, cause_names, aval_types, main_message_no, main_message_en
        )

        l.write(s.encode(use_encoding))
    l.close()


if __name__ == "__main__":

    # regions_kdv = gkdv.get_kdv("ForecastRegionKDV")
    regions = list(range(106, 134))  # ForecastRegionTID = [106, 134> 106 is Alta, 133 is Salten

    date_from = "2014-12-01"
    date_to = "2015-06-01"

    # file names (local_storage for the pickles, output_folder for the csv report)
    file_name_for_warnings_pickle = "{0}{1}".format(se.local_storage, "runForMainMessage warnings.pickle")
    file_name_for_main_messages_pickle = "{0}{1}".format(se.local_storage, "runForMainMessage main messages.pickle")
    file_name_for_main_messages_csv = "{0}{1}".format(se.output_folder, "Alle hovedbudskap.csv")

    ##### pickle the warnings and dataset with main messages
    pickle_warnings(regions, date_from, date_to, file_name_for_warnings_pickle)
    main_messages = select_messages_with_more(file_name_for_warnings_pickle)
    mp.pickle_anything(main_messages, file_name_for_main_messages_pickle)
    main_messages = mp.unpickle_anything(file_name_for_main_messages_pickle)

    # write to file
    save_main_messages_to_file(main_messages, file_name_for_main_messages_csv)

    # presumably a convenient last line for a debugger breakpoint
    a = 1
def get_node_list(pickle_file_name_3, make_new_node_list):
    '''Makes a list of NodesAndValues objects. All nodes get an object and relations between the nodes are
    calculated. Lots of looping.

    :param pickle_file_name_3:
    :param make_new_node_list:
    :return:
    '''

    if make_new_node_list:
        problem_kdv = {0: 'Ikke gitt',
                       3: 'Toerre loessnoeskred',
                       5: 'Vaate loessnoeskred',
                       7: 'Nysnoeflak',
                       10: 'Fokksnoe',
                       20: 'Nysnoe',
                       30: 'Vedvarende svakt lag',
                       37: 'Dypt vedvarende svakt lag',
                       40: 'Vaat snoe',
                       45: 'Vaate flakskred',
                       50: 'Glideskred'}

        cause_kdv = gkdv.get_kdv('AvalCauseKDV')
        danger_kdv = gkdv.get_kdv('AvalancheDangerKDV')
        activity_influenced_kdv = gkdv.get_kdv('ActivityInfluencedKDV')

        nodes_dict = {}
        id_counter = -1

        for cause_tid, cause_kdve in cause_kdv.iteritems():
            cause_name = cause_kdve.Name
            if 'kke gitt' in cause_name:
                cause_name = 'Svakt lag {0}'.format(cause_name)
            if cause_kdve.IsActive:
                id_counter += 1
                nodes_dict[cause_name] = id_counter

        for problem_tid, problem_name in problem_kdv.iteritems():
            if 'kke gitt' in problem_name:
                problem_name = 'Skredproblem {0}'.format(problem_name)
            id_counter += 1
            nodes_dict[problem_name] = id_counter

        for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
            if 'kke gitt' in desired_damage_extent_name:
                desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
            id_counter += 1
            nodes_dict[desired_damage_extent_name] = id_counter

        for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
            if activity_influenced_tid < 200:  # only snow
                activity_influenced_name = activity_influenced_kdve.Name
                if 'kke gitt' in activity_influenced_name:
                    activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                if activity_influenced_kdve.IsActive:
                    id_counter += 1
                    nodes_dict[activity_influenced_name] = id_counter

        for danger_tid, danger_kdve in danger_kdv.iteritems():
            danger_name = danger_kdve.Name
            if 'kke gitt' in danger_name:
                'Faregrad {0}'.format(danger_name)
            if danger_kdve.IsActive:
                id_counter += 1
                nodes_dict[danger_name] = id_counter

        make_nodes = True
        nodes_and_values = []
        print_counter = 0

        for i in incident_list:

            print 'Index {0} of 192 in incidentlist'.format(print_counter)
            print_counter += 1

            if i.forecast:
                cause = i.forecast.avalanche_problems[0].cause_name
                if 'kke gitt' in cause: cause = 'Svakt lag {0}'.format(cause)
                problem = i.forecast.avalanche_problems[0].main_cause
                if 'kke gitt' in problem: problem = 'Skredproblem {0}'.format(problem)

                # Loop through the cause and problem list.
                # If it is the first run make the nodes.
                # If the causes in the lists match what is in the list of acutal incidents, add one to the node.
                for cause_tid, cause_kdve in cause_kdv.iteritems():
                    if cause_kdve.IsActive:
                        cause_name = cause_kdve.Name
                        if 'kke gitt' in cause_name: cause_name = 'Svakt lag {0}'.format(cause_name)
                        for problem_tid, problem_name in problem_kdv.iteritems():
                            if 'kke gitt' in problem_name: problem_name = 'Skredproblem {0}'.format(problem_name)
                            if make_nodes:  # the run of the first item of incident_list covers all nodes
                                nodes_and_values.append(NodesAndValues(cause_name, nodes_dict[cause_name], problem_name,
                                                                       nodes_dict[problem_name]))
                            if cause in cause_name and problem in problem_name:
                                for nv in nodes_and_values:
                                    if cause in nv.node_name and problem in nv.target_name:
                                        nv.add_one()

                damage_extent = i.incident.DamageExtentName
                if 'kke gitt' in damage_extent: damage_extent = 'Skadeomfang {0}'.format(damage_extent)

                for problem_tid, problem_name in problem_kdv.iteritems():
                    if 'kke gitt' in problem_name:
                        problem_name = 'Skredproblem {0}'.format(problem_name)
                    for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                        if 'kke gitt' in desired_damage_extent_name:
                            desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                        if make_nodes:
                            nodes_and_values.append(
                                NodesAndValues(problem_name, nodes_dict[problem_name], desired_damage_extent_name,
                                               nodes_dict[desired_damage_extent_name]))
                        if problem in problem_name and damage_extent in desired_damage_extent_name:
                            for nv in nodes_and_values:
                                if problem in nv.node_name and damage_extent in nv.target_name:
                                    nv.add_one()

                activity_influenced = i.incident.ActivityInfluencedName
                if 'kke gitt' in activity_influenced: activity_influenced = 'Aktivitet {0}'.format(activity_influenced)

                for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                    if 'kke gitt' in desired_damage_extent_name:
                        desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                    for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                        if activity_influenced_tid < 200:  # only snow
                            activity_influenced_name = activity_influenced_kdve.Name
                            if 'kke gitt' in activity_influenced_name:
                                activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                            if activity_influenced_kdve.IsActive:
                                if make_nodes:
                                    nodes_and_values.append(NodesAndValues(desired_damage_extent_name,
                                                                           nodes_dict[desired_damage_extent_name],
                                                                           activity_influenced_name,
                                                                           nodes_dict[activity_influenced_name]))
                                if desired_damage_extent_name in damage_extent and activity_influenced_name in activity_influenced:
                                    for nv in nodes_and_values:
                                        if desired_damage_extent_name in nv.node_name and activity_influenced_name in nv.target_name:
                                            nv.add_one()

                danger = i.forecast.danger_level_name
                if 'kke gitt' in danger: danger = 'Faregrad {0}'.format(danger)

                for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                    if activity_influenced_tid < 200:
                        activity_influenced_name = activity_influenced_kdve.Name
                        if 'kke gitt' in activity_influenced_name:
                            activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                        if activity_influenced_kdve.IsActive:
                            for danger_tid, danger_kdve in danger_kdv.iteritems():
                                danger_name = danger_kdve.Name
                                if 'kke gitt' in danger_name:
                                    'Faregrad {0}'.format(danger_name)
                                if danger_kdve.IsActive:
                                    if make_nodes:
                                        nodes_and_values.append(
                                            NodesAndValues(activity_influenced_name,
                                                           nodes_dict[activity_influenced_name],
                                                           danger_name, nodes_dict[danger_name]))
                                    if activity_influenced_name in activity_influenced and danger_name in danger:
                                        for nv in nodes_and_values:
                                            if activity_influenced_name in nv.node_name and danger_name in nv.target_name:
                                                nv.add_one()

            make_nodes = False

        mp.pickle_anything(nodes_and_values, pickle_file_name_3)
    else:
        nodes_and_values = mp.unpickle_anything(pickle_file_name_3)

    return nodes_and_values
# Example #21
# 0
    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(
        env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(
        env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
        all_danger_signs = go.get_danger_sign(from_date,
                                              to_date,
                                              region_ids=region_id,
                                              geohazard_tid=10)
        mp.pickle_anything([all_danger_levels, all_danger_signs],
                           pickle_file_name_1)
    else:
        # load data from pickle
        all_danger_levels, all_danger_signs = mp.unpickle_anything(
            pickle_file_name_1)

    if make_new:
        # for counting days with danger levels
        level_count = []
        data = {1: [], 2: [], 3: [], 4: [], 5: []}
        for dl in all_danger_levels:
            if dl.source == 'Varsel' and dl.danger_level is not 0:
                level_count.append(dl.danger_level)
                for ds in all_danger_signs:
                    if dl.date == ds.DtObsTime.date(
                    ) and dl.region_name in ds.ForecastRegionName: