def make_m3_figs(forecaster_dict, nick, path=''):
    '''Makes m3 tables for one forecaster. Uses methods from the runmatrix module.

    Pipeline: pickle a data set of this forecaster's warnings, build the m3
    matrix elements from it, then plot the matrix to file.

    :param forecaster_dict: [dict] forecaster objects keyed by nick name
    :param nick:            [string] key into forecaster_dict; selects the relevant forecaster
    :param path:            [string] folder where the plot (end product) is saved
    :return:                None
    '''

    import runmatrix as rm

    f = forecaster_dict[nick]
    # select only warnings for this forecaster
    one_forecaster_warnings = f.warnings

    # prepare dataset; intermediate results go via pickle files in local storage
    pickle_data_set_file_name = '{0}runforefollow data set {1}.pickle'.format(env.local_storage, f.observer_id)
    rm.pickle_data_set(one_forecaster_warnings, pickle_data_set_file_name, use_ikke_gitt=False)
    forecaster_data_set = mp.unpickle_anything(pickle_data_set_file_name)

    # prepare the m3 elements (cell contents)
    pickle_m3_v2_file_name = '{0}runforefollow m3 {1}.pickle'.format(env.local_storage, f.observer_id)
    rm.pickle_M3(forecaster_data_set, 'matrixconfiguration.v2.csv', pickle_m3_v2_file_name)
    m3_v2_elements = mp.unpickle_anything(pickle_m3_v2_file_name)

    # plot to "<path><observer_id>_m3"
    plot_m3_v2_file_name = '{0}{1}_m3'.format(path, f.observer_id)
    rm.plot_m3_v2(m3_v2_elements, plot_m3_v2_file_name)

    return
def get_2015_16_warnings(how_to_get_data='Get new and dont pickle', pickle_file_name=None):
    '''Gets all forecasted avalanche dangers for all active regions for the 2015-16 season.

    :param how_to_get_data:  [string] 'Get new and dont pickle', 'Get new and save pickle' or 'Load pickle'
    :param pickle_file_name: [string] Not needed if no pickles involved
    :return all_warnings:    [list of AvalancheDanger] sorted by date; if how_to_get_data matches
                             no option, an error string is returned instead.
    '''

    if 'Get new' in how_to_get_data:

        from_date = dt.date(2015, 11, 30)
        # was dt.date(2016, 05, 31): a Python 2 octal-style literal, a syntax error on Python 3
        to_date = dt.date(2016, 5, 31)

        region_ids = gm.get_active_forecast_regions()

        all_warnings = []
        for region_id in region_ids:
            all_warnings += gd.get_forecasted_dangers(region_id, from_date, to_date, include_problems=True, include_ikke_vurdert=False)

        # Sort by date
        all_warnings = sorted(all_warnings, key=lambda w: w.date)

        if 'and save pickle' in how_to_get_data:
            mp.pickle_anything(all_warnings, pickle_file_name)

    elif 'Load pickle' in how_to_get_data:
        # was mp.unpickle_anything(pickle_warnings_file_name): that name is undefined
        # in this scope and raised NameError; the parameter is pickle_file_name
        all_warnings = mp.unpickle_anything(pickle_file_name)

    else:
        all_warnings = 'No valid data retrival method given in get_2015_16_warnings.'

    return all_warnings
def get_data(region_id, start_date, end_date, data_from="request"):
    """Gets all the data needed in the plots and pickles it so that I don't need to do requests to make plots.

    :param region_id:       [int]    Region ID is an int as given i ForecastRegionKDV
    :param start_date:      [date]   Start date. Data for this date is not included in requests from OData
    :param end_date:        [date]   End date.
    :param data_from:       [string] Default "request". Other options: "request and save" and "local storage"
    :return problems, dangers:  [lists] None, None if an unknown data_from option is given
    """

    filename = "{3}dangerandproblemplot_id{0} {1}-{2}.pickle".format(region_id, start_date.strftime('%Y'), end_date.strftime('%y'), env.local_storage)

    if "request" in data_from:
        # Early years don't have avalanche problems, so only request them for periods after Nov 2014.
        # was dt.date(2014, 11, 01): octal-style literal, a syntax error on Python 3
        if end_date > dt.date(2014, 11, 1) and start_date > dt.date(2014, 11, 1):
            problems = gp.get_all_problems(region_id, start_date, end_date, add_danger_level=False)
        else:
            problems = []

        dangers = gd.get_all_dangers(region_id, start_date, end_date)

        if "request and save" in data_from:
            mp.pickle_anything([problems, dangers], filename)

    elif "local storage" in data_from:
        problems, dangers = mp.unpickle_anything(filename)

    else:
        # parenthesized single-argument print works identically on Python 2 and 3
        print("rundagerandproblem.py -> get_data: unknown data handler.")
        problems = None
        dangers = None

    return problems, dangers
def select_messages_with_more(pickle_warnings_file_name):
    """
    Method selects unique messages and adds to them the english name (english name that appeared on the first
    occurrence), adds the danger levels, main causes, causes, and avalanche types that are used to this main
    message. There is also a count of how many times the main text has occurred.

    :param pickle_warnings_file_name:   [string] filename to where the pickle file with the warnings is located
    :return main_messages:              [list of MainMessage] ordered alphabetically on the norwegian
                                        main text (the occurrence ordering is commented out below)
    """

    def _add_problem_info(message, warning):
        # Append cause/type info from every avalanche problem on the warning to the message object.
        for p in warning.avalanche_problems:
            message.add_to_main_causes(p.main_cause)
            message.add_to_cause_names(p.cause_name)
            message.add_to_aval_types(p.aval_type)

    warnings = mp.unpickle_anything(pickle_warnings_file_name)
    main_messages = []

    for w in warnings:

        message_no = fe.remove_norwegian_letters(w.main_message_no)
        message_no = more_text_cleanup(message_no)

        # skip warnings without real content ("Not assessed" or empty)
        if message_no in ("Ikke vurdert", ""):
            continue

        if message_no_is_in_list(message_no, main_messages):
            # message already registered: count the occurrence and append its details
            m = get_main_message_object(message_no, main_messages)
            m.add_occurrence()
            m.add_to_danger_levels(w.danger_level)
            _add_problem_info(m, w)

        else:
            # first occurrence: register a new message
            new_m = MainMessage(message_no)
            new_m.add_main_message_en(w.main_message_en)
            new_m.add_to_danger_levels(w.danger_level)
            _add_problem_info(new_m, w)
            main_messages.append(new_m)

    # sort on main_message_no
    main_messages.sort(key=lambda m: m.main_message_no, reverse=False)

    # sort on occurrences
    # main_messages.sort(key=lambda m: m.occurrences, reverse=True)

    return main_messages
def test_sort_observer_list():
    """Sorts the pickled observer list case-insensitively on nick name, keeping
    the first entry (the unknown user) pinned at the front."""

    observer_dict = mp.unpickle_anything('{0}observerlist.pickle'.format(env.web_root_folder))
    pairs = [[observer_id, nick] for observer_id, nick in observer_dict.iteritems()]
    unknown_user = pairs.pop(0)
    sorted_pairs = sorted(pairs, key=lambda pair: pair[1].lower())
    sorted_pairs.insert(0, unknown_user)

    return
def make_2015_16_plots(run_all=False):
    """Plots both observations pr observer and pr region for display on web page for the season 2015-16.
    Method includes a request for list of relevant observers.

    :param run_all: [bool] if True, plot every month of the season for everyone; if False,
                    plot only the most recent month(s) for known observers.
    :return:        None
    """

    # list of months to be plotted, from Nov 2015 up to the current month
    all_months = []
    month = dt.date(2015, 11, 1)
    while month < dt.date.today():
        all_months.append(month)
        # jump safely into the next month, then snap back to its first day
        almost_next = month + dt.timedelta(days=35)
        month = dt.date(almost_next.year, almost_next.month, 1)

    # if not specified run only the last month; during the first days of a month
    # also rerun the previous month so its plot gets a final update
    if not run_all:
        if dt.date.today().day < 5 and len(all_months) > 1:
            last_months = all_months[-2:]
        else:
            last_months = [all_months[-1]]
    else:
        last_months = all_months

    # get a list of relevant observers to plot and make a pickle in the web-folder
    previous_observer_list = mp.unpickle_anything('{0}observerlist.pickle'.format(env.web_root_folder))
    observer_list = gm.get_observer_dict_for_2015_16_ploting()
    mp.pickle_anything(observer_list, '{0}observerlist.pickle'.format(env.web_root_folder))

    # observers not in the previous list need all their months plotted, not just the last
    new_observers = {k: v for k, v in observer_list.iteritems() if k not in previous_observer_list}

    # Get all active snow regions (ForecastRegionTID between 100 and 150)
    region_ids = []
    ForecastRegionKDV = gkdv.get_kdv('ForecastRegionKDV')
    for k, v in ForecastRegionKDV.iteritems():
        if 100 < k < 150 and v.IsActive:
            region_ids.append(v.ID)

    # run the stuff
    make_observer_plots(new_observers, all_months)
    make_observer_plots(previous_observer_list, last_months)
    make_region_plots(region_ids, last_months)

    return
def get_incident_list(all_incidents, desired_damage_extent_kdv, pickle_file_name_2, make_new_incident_list):
    '''Each row in the incident list contains Incident and Forecast objects where
    date and forecast region match AND where incidents match the damage extent we wish to study.

    NOTE(review): IncidentAndForecasts is constructed with all_forecasts, which is not a
    parameter here - confirm it is defined at module level before calling with
    make_new_incident_list=True, otherwise this raises NameError.

    :param all_incidents:               [list] incidents to filter
    :param desired_damage_extent_kdv:   [dict] {DamageExtentTID: name} of the damage extents to keep
    :param pickle_file_name_2:          [string] pickle the list is saved to / loaded from
    :param make_new_incident_list:      [bool] build a new list (and pickle it) or load the pickled one
    :return incident_list:              [list of IncidentAndForecasts]
    '''

    if make_new_incident_list:
        # membership test directly on the dict (".keys()" builds a throwaway list on py2)
        incident_list = [IncidentAndForecasts(incident, all_forecasts)
                         for incident in all_incidents
                         if incident.DamageExtentTID in desired_damage_extent_kdv]
        mp.pickle_anything(incident_list, pickle_file_name_2)
    else:
        incident_list = mp.unpickle_anything(pickle_file_name_2)

    return incident_list
def get_data(from_date, to_date, region_ids, pickle_file_name_1, get_new):
    '''Timeconsuming and inefficient. Not proud..

    Either requests all incidents and forecasts for the given regions and period
    (and saves them to pickle), or loads a previously pickled pair.

    :param from_date:
    :param to_date:
    :param region_ids:
    :param pickle_file_name_1:
    :param get_new:
    :return: (all_forecasts, all_incidents)
    '''
    if not get_new:
        # load data from pickle
        all_forecasts, all_incidents = mp.unpickle_anything(pickle_file_name_1)
    else:
        # get all data and save to pickle
        all_incidents = go.get_incident(from_date, to_date, region_ids=region_ids, geohazard_tid=10)
        all_forecasts = []
        for one_region in region_ids:
            all_forecasts += gd.get_forecasted_dangers(one_region, from_date, to_date, include_problems=True)
        mp.pickle_anything([all_forecasts, all_incidents], pickle_file_name_1)

    return all_forecasts, all_incidents
Beispiel #9
0
                    drift_svv_danger.append(d)

        aval_indexes = gm.get_avalanche_index(
            from_date, to_date,
            region_ids=region_ids)  #, nick_names=drift_nick)
        drift_svv_index = []
        for i in aval_indexes:
            if drift_nick in i.observation.NickName:
                drift_svv_index.append(i)

        mp.pickle_anything(
            [forecast_danger, drift_svv_danger, drift_svv_index],
            pickle_file_name)

    else:
        forecast_danger, drift_svv_danger, drift_svv_index = mp.unpickle_anything(
            pickle_file_name)

    # order and group by date:
    elrapp_data_list = []
    for fd in forecast_danger:
        ed = ElrappData(fd.date, fd.region_name)
        ed.set_danger_forecast(fd.danger_level_name)
        elrapp_data_list.append(ed)

    for dd in drift_svv_danger:
        for ed in elrapp_data_list:
            if ed.date == dd.date and ed.region == dd.region_name:
                ed.set_danger_elrapp(dd.danger_level_name)

    for di in drift_svv_index:
        for ed in elrapp_data_list:
    ## The output
    plot_file_name = 'Danger level and danger sign 2015-16.png'

    ##################################### End of configuration ###################################

    pickle_file_name_1 = '{0}runlevelanddangersign part 1.pickle'.format(env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
        all_danger_signs = go.get_danger_sign(from_date, to_date, region_ids=region_id, geohazard_tid=10)
        mp.pickle_anything([all_danger_levels, all_danger_signs], pickle_file_name_1)
    else:
        # load data from pickle
        all_danger_levels, all_danger_signs = mp.unpickle_anything(pickle_file_name_1)

    if make_new:
        # for counting days with danger levels
        level_count = []
        data = {1:[], 2:[], 3:[], 4:[], 5:[]}
        for dl in all_danger_levels:
            if dl.source == 'Varsel' and dl.danger_level is not 0:
                level_count.append(dl.danger_level)
                for ds in all_danger_signs:
                    if dl.date == ds.DtObsTime.date() and dl.region_name in ds.ForecastRegionName:
                        print '{0}'.format(dl.date)
                        data[dl.danger_level].append(fe.remove_norwegian_letters(ds.DangerSignName))
        mp.pickle_anything([data, level_count], pickle_file_name_2)
    else:
        data, level_count = mp.unpickle_anything(pickle_file_name_2)
            m.occurrences, danger_levels, main_causes, cause_names, aval_types, main_message_no, main_message_en
        )

        l.write(s.encode(use_encoding))
    l.close()


if __name__ == "__main__":

    # Snow forecast regions for the 2014-15 season.
    # regions_kdv = gkdv.get_kdv("ForecastRegionKDV")
    regions = list(range(106, 134))  # ForecastRegionTID = [106, 134> 106 is Alta, 133 is Salten

    date_from = "2014-12-01"
    date_to = "2015-06-01"

    # file names
    file_name_for_warnings_pickle = "{0}{1}".format(se.local_storage, "runForMainMessage warnings.pickle")
    file_name_for_main_messages_pickle = "{0}{1}".format(se.local_storage, "runForMainMessage main messages.pickle")
    file_name_for_main_messages_csv = "{0}{1}".format(se.output_folder, "Alle hovedbudskap.csv")

    ##### pickle the warnings and dataset with main messages
    pickle_warnings(regions, date_from, date_to, file_name_for_warnings_pickle)
    main_messages = select_messages_with_more(file_name_for_warnings_pickle)
    mp.pickle_anything(main_messages, file_name_for_main_messages_pickle)
    # round-trip through the pickle; presumably just to verify it loads - TODO confirm
    main_messages = mp.unpickle_anything(file_name_for_main_messages_pickle)

    # write to file
    save_main_messages_to_file(main_messages, file_name_for_main_messages_csv)

    # no-op line, handy as a breakpoint target when debugging
    a = 1
Beispiel #12
0
        env.local_storage)
    pickle_file_name_2 = '{0}runlevelanddangersign part 2.pickle'.format(
        env.local_storage)

    if get_new:
        # get all data and save to pickle
        all_danger_levels = gd.get_all_dangers(region_id, from_date, to_date)
        all_danger_signs = go.get_danger_sign(from_date,
                                              to_date,
                                              region_ids=region_id,
                                              geohazard_tid=10)
        mp.pickle_anything([all_danger_levels, all_danger_signs],
                           pickle_file_name_1)
    else:
        # load data from pickle
        all_danger_levels, all_danger_signs = mp.unpickle_anything(
            pickle_file_name_1)

    if make_new:
        # for counting days with danger levels
        level_count = []
        data = {1: [], 2: [], 3: [], 4: [], 5: []}
        for dl in all_danger_levels:
            if dl.source == 'Varsel' and dl.danger_level is not 0:
                level_count.append(dl.danger_level)
                for ds in all_danger_signs:
                    if dl.date == ds.DtObsTime.date(
                    ) and dl.region_name in ds.ForecastRegionName:
                        print '{0}'.format(dl.date)
                        data[dl.danger_level].append(
                            fe.remove_norwegian_letters(ds.DangerSignName))
        mp.pickle_anything([data, level_count], pickle_file_name_2)
def step1_get_data(year, month, observer_id=None, region_id=None, get_new=True, make_pickle=False, data_description="no_description_given"):
    """Gets data for one month and prepares for plotting.

    :param year:                [int]
    :param month:               [int]
    :param observer_id:         [int or list of ints]
    :param region_id:           [int]
    :param get_new:             [bool] get data with a new request or use local pickle
    :param make_pickle:         [bool] only matters if getting new data, make a pickle in local storage
    :param data_description:    [string] Custom description for naming pickle-files. Pass None explicitly
                                to fall back to region/observer based file names.
    :return dates:              [list of DayData objects]; empty list if data_description is None and
                                neither observer_id nor region_id is given
    """

    # choose pickle file name (flattened from the original nested if/else pyramid)
    if data_description is not None:
        pickle_file_name = "{0}{1}_{2}{3:02d}.pickle".format(env.local_storage, data_description, year, month)
    elif region_id is not None:
        pickle_file_name = "{0}runPlotRegionData_{1}_{2}{3:02d}.pickle".format(env.local_storage, region_id, year, month)
    elif observer_id is not None:
        pickle_file_name = "{0}runPlotObserverData_{1}_{2}{3:02d}.pickle".format(env.local_storage, observer_id, year, month)
    else:
        print('Need Observerid and/or forecastRegionTID to make this work.')
        return []

    # the month interval; the end date is exclusive in the request below
    first, last = cal.monthrange(year, month)
    from_date = dt.date(year, month, 1)
    to_date = dt.date(year, month, last) + dt.timedelta(days=1)

    if get_new:
        all_observations = go.get_all_registrations(from_date, to_date, output='DataFrame', geohazard_tid=10,
                                                    observer_ids=observer_id, region_ids=region_id)

        # for all dates in the requested from-to interval
        dates = []
        for d in _get_dates(from_date, to_date, dt.timedelta(days=1)):

            if observer_id is not None and region_id is None:   # if only data for one observer
                dd = DayData(d, observer_id=observer_id)
            else:                                               # else wish to have data for some/all observers in a region
                dd = DayData(d, region_id=region_id)

            obstyp = []
            regids = []
            nicks = []
            loc_pr_regid = {}
            obs_pr_regid = {}
            nic_pr_regid = {}

            # loop through all observations; fetch each row once instead of
            # repeating all_observations.iloc[i] per attribute
            for i in all_observations.index:
                row = all_observations.iloc[i]
                this_date = row.DtObsTime.date()

                # append all observations where dates match
                if this_date == d:

                    regid = row.RegID

                    # location on regid (only one location pr RegID)
                    if regid not in loc_pr_regid:
                        loc_pr_regid[regid] = row.ForecastRegionName

                    # get the nickname used on the regid (only one pr RegID)
                    if regid not in nic_pr_regid:
                        nic_pr_regid[regid] = row.NickName

                    # observations pr regid (might be more)
                    if regid in obs_pr_regid:
                        obs_pr_regid[regid].append(row.RegistrationName)
                    else:
                        obs_pr_regid[regid] = [row.RegistrationName]

                    # list of all observations on this date; pictures of snow
                    # profiles are counted as snow profiles
                    if row.RegistrationName == 'Bilde' and row.TypicalValue1 == 'Bilde av: Snoeprofil':
                        obstyp.append('Snoeprofil')
                    else:
                        obstyp.append(row.RegistrationName)

                    # list of all regids - this counts occurrences
                    regids.append(int(row.RegID))

                    # list of all observer nicknames - this counts occurrences
                    nicks.append(row.NickName)

            # add to object for plotting
            dd.add_loc_pr_regid(loc_pr_regid)
            dd.add_obs_pr_regid(obs_pr_regid)
            dd.add_nic_pr_regid(nic_pr_regid)
            dd.add_observations(obstyp)
            dd.add_regids(regids)
            dd.add_nicks(nicks)
            dates.append(dd)

        if make_pickle:
            mp.pickle_anything(dates, pickle_file_name)

    else:
        dates = mp.unpickle_anything(pickle_file_name)

    return dates
Beispiel #14
0
'''


__author__ = 'kmu'
'''
# NOTE(review): the __author__ line above sits INSIDE the triple-quoted string,
# so the attribute is never actually assigned - confirm whether that was intended.

# Configuration: one region (TID 129) and one season of warnings.
regions = [129]
date_from = "2015-12-01"
date_to = "2016-06-01"
pickle_file_name = "tamok_2015_2016.pck"


# pickle_warnings(regions, date_from, date_to, pickle_file_name)


# load the previously pickled warnings for the season
data_set = mp.unpickle_anything(pickle_file_name)

"""https://automatetheboringstuff.com/chapter12/"""
# build an Excel workbook; presumably xl is openpyxl - confirm against the imports
wb = xl.Workbook()
type(wb)

#wb.get_sheet_names()

sheet = wb.active
sheet.title = 'Tamok 2015_2016'
# NOTE(review): Workbook.get_sheet_names() is deprecated in newer openpyxl
# in favour of wb.sheetnames - confirm the installed version.
print(wb.get_sheet_names())

print(data_set[7].danger_level_name)

# header row
sheet.cell(row=1, column=1, value="Region name")
sheet.cell(row=1, column=2, value="Date")
def get_kdv(view):
    '''Imports a view from regObs and returns a dictionary with <key, value> = <ID, KDVelement>.
    The view is requested from the regObs api if the pickled copy is older than 3 days.

    :param view:    [string]    kdv view
    :return dict:   {}          view as an ordered dictionary

    Ex of use: aval_cause_kdv = get_kdv('AvalCauseKDV')
    Ex of url for returning values for IceCoverKDV in norwegian:
    http://api.nve.no/hydrology/regobs/v0.9.4/OData.svc/ForecastRegionKDV?$filter=Langkey%20eq%201%20&$format=json
    '''

    kdv_file_name = '{0}{1}.pickle'.format(env.local_storage, view)
    kdv_dict = {}   # renamed from "dict": don't shadow the builtin

    if os.path.exists(kdv_file_name):

        max_file_age = 3    # days
        file_date_seconds = os.path.getmtime(kdv_file_name)
        file_date_datetime = dt.datetime.fromtimestamp(file_date_seconds)
        file_date_limit = dt.datetime.now() - dt.timedelta(days=max_file_age)

        if file_date_datetime < file_date_limit:
            print("getkdvelements.py -> get_kdv: Removing KDV from local storage: {0}".format(kdv_file_name))
            os.remove(kdv_file_name)
            # the recursive call takes the request branch below, which already
            # pickles the fresh copy; the second pickle_anything here was redundant
            ordered_dict = get_kdv(view)
        else:
            # print "getkdvelements.py -> get_kdv: Getting KDV from local storage: {0}".format(kdv_file_name)
            ordered_dict = mp.unpickle_anything(kdv_file_name, print_message=False)

    else:

        # renamed from "filter": don't shadow the builtin
        odata_filter = 'filter=Langkey%20eq%201'

        if 'TripTypeKDV' in view:
            # this view spells the language key differently
            odata_filter = 'filter=LangKey%20eq%201'

        url = 'http://api.nve.no/hydrology/regobs/{0}/OData.svc/{1}?${2}&$format=json'\
            .format(env.api_version, view, odata_filter)

        lang_key = 1

        print("getkdvelements.py -> get_kdv: Getting KDV from URL: {0}".format(url))
        kdv = requests.get(url).json()

        for a in kdv['d']['results']:
            try:
                sort_order = a["SortOrder"]
                is_active = a["IsActive"]

                if 'AvalCauseKDV' in url and 9 < int(a['ID']) < 26:      # this table gets special treatment
                    tid = int(a["ID"])
                    name = fe.remove_norwegian_letters(a["Description"])
                    description = fe.remove_norwegian_letters(a["Name"])
                elif 'TripTypeKDV' in view:
                    tid = int(a["TripTypeTID"])
                    name = fe.remove_norwegian_letters(a["Name"])
                    description = fe.remove_norwegian_letters(a["Descr"])
                else:
                    tid = int(a["ID"])
                    name = fe.remove_norwegian_letters(a["Name"])
                    description = fe.remove_norwegian_letters(a["Description"])

                kdv_dict[tid] = KDVelement(tid, sort_order, is_active, name, description, lang_key)

            except (RuntimeError, TypeError, NameError):
                # rows missing expected fields are skipped silently (original behavior)
                pass

        ordered_dict = collections.OrderedDict(sorted(kdv_dict.items()))
        mp.pickle_anything(ordered_dict, kdv_file_name)

    return ordered_dict
def get_node_list(pickle_file_name_3, make_new_node_list):
    '''Makes a list of NodesAndValues objects. All nodes get an object and relations between the nodes are
    calculated. Lots of looping.

    :param pickle_file_name_3:
    :param make_new_node_list:
    :return:
    '''

    if make_new_node_list:
        problem_kdv = {0: 'Ikke gitt',
                       3: 'Toerre loessnoeskred',
                       5: 'Vaate loessnoeskred',
                       7: 'Nysnoeflak',
                       10: 'Fokksnoe',
                       20: 'Nysnoe',
                       30: 'Vedvarende svakt lag',
                       37: 'Dypt vedvarende svakt lag',
                       40: 'Vaat snoe',
                       45: 'Vaate flakskred',
                       50: 'Glideskred'}

        cause_kdv = gkdv.get_kdv('AvalCauseKDV')
        danger_kdv = gkdv.get_kdv('AvalancheDangerKDV')
        activity_influenced_kdv = gkdv.get_kdv('ActivityInfluencedKDV')

        nodes_dict = {}
        id_counter = -1

        for cause_tid, cause_kdve in cause_kdv.iteritems():
            cause_name = cause_kdve.Name
            if 'kke gitt' in cause_name:
                cause_name = 'Svakt lag {0}'.format(cause_name)
            if cause_kdve.IsActive:
                id_counter += 1
                nodes_dict[cause_name] = id_counter

        for problem_tid, problem_name in problem_kdv.iteritems():
            if 'kke gitt' in problem_name:
                problem_name = 'Skredproblem {0}'.format(problem_name)
            id_counter += 1
            nodes_dict[problem_name] = id_counter

        for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
            if 'kke gitt' in desired_damage_extent_name:
                desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
            id_counter += 1
            nodes_dict[desired_damage_extent_name] = id_counter

        for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
            if activity_influenced_tid < 200:  # only snow
                activity_influenced_name = activity_influenced_kdve.Name
                if 'kke gitt' in activity_influenced_name:
                    activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                if activity_influenced_kdve.IsActive:
                    id_counter += 1
                    nodes_dict[activity_influenced_name] = id_counter

        for danger_tid, danger_kdve in danger_kdv.iteritems():
            danger_name = danger_kdve.Name
            if 'kke gitt' in danger_name:
                'Faregrad {0}'.format(danger_name)
            if danger_kdve.IsActive:
                id_counter += 1
                nodes_dict[danger_name] = id_counter

        make_nodes = True
        nodes_and_values = []
        print_counter = 0

        for i in incident_list:

            print 'Index {0} of 192 in incidentlist'.format(print_counter)
            print_counter += 1

            if i.forecast:
                cause = i.forecast.avalanche_problems[0].cause_name
                if 'kke gitt' in cause: cause = 'Svakt lag {0}'.format(cause)
                problem = i.forecast.avalanche_problems[0].main_cause
                if 'kke gitt' in problem: problem = 'Skredproblem {0}'.format(problem)

                # Loop through the cause and problem list.
                # If it is the first run make the nodes.
                # If the causes in the lists match what is in the list of acutal incidents, add one to the node.
                for cause_tid, cause_kdve in cause_kdv.iteritems():
                    if cause_kdve.IsActive:
                        cause_name = cause_kdve.Name
                        if 'kke gitt' in cause_name: cause_name = 'Svakt lag {0}'.format(cause_name)
                        for problem_tid, problem_name in problem_kdv.iteritems():
                            if 'kke gitt' in problem_name: problem_name = 'Skredproblem {0}'.format(problem_name)
                            if make_nodes:  # the run of the first item of incident_list covers all nodes
                                nodes_and_values.append(NodesAndValues(cause_name, nodes_dict[cause_name], problem_name,
                                                                       nodes_dict[problem_name]))
                            if cause in cause_name and problem in problem_name:
                                for nv in nodes_and_values:
                                    if cause in nv.node_name and problem in nv.target_name:
                                        nv.add_one()

                damage_extent = i.incident.DamageExtentName
                if 'kke gitt' in damage_extent: damage_extent = 'Skadeomfang {0}'.format(damage_extent)

                for problem_tid, problem_name in problem_kdv.iteritems():
                    if 'kke gitt' in problem_name:
                        problem_name = 'Skredproblem {0}'.format(problem_name)
                    for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                        if 'kke gitt' in desired_damage_extent_name:
                            desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                        if make_nodes:
                            nodes_and_values.append(
                                NodesAndValues(problem_name, nodes_dict[problem_name], desired_damage_extent_name,
                                               nodes_dict[desired_damage_extent_name]))
                        if problem in problem_name and damage_extent in desired_damage_extent_name:
                            for nv in nodes_and_values:
                                if problem in nv.node_name and damage_extent in nv.target_name:
                                    nv.add_one()

                activity_influenced = i.incident.ActivityInfluencedName
                if 'kke gitt' in activity_influenced: activity_influenced = 'Aktivitet {0}'.format(activity_influenced)

                for desired_damage_extent_tid, desired_damage_extent_name in desired_damage_extent_kdv.iteritems():
                    if 'kke gitt' in desired_damage_extent_name:
                        desired_damage_extent_name = 'Skadeomfang {0}'.format(desired_damage_extent_name)
                    for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                        if activity_influenced_tid < 200:  # only snow
                            activity_influenced_name = activity_influenced_kdve.Name
                            if 'kke gitt' in activity_influenced_name:
                                activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                            if activity_influenced_kdve.IsActive:
                                if make_nodes:
                                    nodes_and_values.append(NodesAndValues(desired_damage_extent_name,
                                                                           nodes_dict[desired_damage_extent_name],
                                                                           activity_influenced_name,
                                                                           nodes_dict[activity_influenced_name]))
                                if desired_damage_extent_name in damage_extent and activity_influenced_name in activity_influenced:
                                    for nv in nodes_and_values:
                                        if desired_damage_extent_name in nv.node_name and activity_influenced_name in nv.target_name:
                                            nv.add_one()

                danger = i.forecast.danger_level_name
                if 'kke gitt' in danger: danger = 'Faregrad {0}'.format(danger)

                for activity_influenced_tid, activity_influenced_kdve in activity_influenced_kdv.iteritems():
                    if activity_influenced_tid < 200:
                        activity_influenced_name = activity_influenced_kdve.Name
                        if 'kke gitt' in activity_influenced_name:
                            activity_influenced_name = 'Aktivitet {0}'.format(activity_influenced_name)
                        if activity_influenced_kdve.IsActive:
                            for danger_tid, danger_kdve in danger_kdv.iteritems():
                                danger_name = danger_kdve.Name
                                if 'kke gitt' in danger_name:
                                    'Faregrad {0}'.format(danger_name)
                                if danger_kdve.IsActive:
                                    if make_nodes:
                                        nodes_and_values.append(
                                            NodesAndValues(activity_influenced_name,
                                                           nodes_dict[activity_influenced_name],
                                                           danger_name, nodes_dict[danger_name]))
                                    if activity_influenced_name in activity_influenced and danger_name in danger:
                                        for nv in nodes_and_values:
                                            if activity_influenced_name in nv.node_name and danger_name in nv.target_name:
                                                nv.add_one()

            make_nodes = False

        mp.pickle_anything(nodes_and_values, pickle_file_name_3)
    else:
        nodes_and_values = mp.unpickle_anything(pickle_file_name_3)

    return nodes_and_values
    from_date = dt.date(2014, 11, 30)
    to_date = dt.date(2015, 6, 1)
    #to_date = dt.date.today()

    ### get and make the data set
    # date_region, forecasted_dangers = step_1_make_data_set(region_id, from_date, to_date)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    #
    # ## Find the observaton of highest value pr region pr date
    # date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_1.pickle'.format(env.local_storage))
    # date_region = step_2_find_most_valued(date_region)
    # mp.pickle_anything([date_region, forecasted_dangers], '{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    # #
    ## ready to add to count elements
    date_region, forecasted_dangers = mp.unpickle_anything('{0}runforavalancheactivity_step_2.pickle'.format(env.local_storage))
    elements = rf.read_configuration_file('{0}aval_dl_configuration.csv'.format(env.input_folder), ActivityAndDanger)
    elements = step_3_count_occurances(date_region, elements)
    mp.pickle_anything([date_region, forecasted_dangers, elements], '{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))

    ### ready to plot?
    date_region, forecasted_dangers, elements = mp.unpickle_anything('{0}runforavalancheactivity_step_3.pickle'.format(env.local_storage))
    step_4_plot(date_region, forecasted_dangers, elements, '{0}Avalanches and dangers {1} to {2}'.format(env.plot_folder, from_date, to_date))

    total_a = 0
    total_aa = 0
    total_ds = 0
    for d in date_region:
        total_a += len(d.avalanche)
        total_aa += len(d.avalanche_activity)
        total_ds += len(d.danger_sign)
# Beispiel #18  (scraper artifact — kept as comment so the file stays parseable)
# 0
    pickle_data_set_file_name = '{0}{1}'.format(env.local_storage, 'runForMatrix data set.pickle')

    pickle_m3_file_name = '{0}{1}'.format(env.local_storage, 'runForMatix m3.pickle')
    plot_m3_file_name = '{0}m3 {1}-{2}'.format(env.plot_folder, date_from[0:4], date_to[2:4])

    pickle_m3_v2_file_name = '{0}{1}'.format(env.local_storage, 'runForMatix m3.v2.pickle')
    plot_m3_v2_file_name = '{0}m3 {1}-{2}.v2'.format(env.plot_folder, date_from[0:4], date_to[2:4])


    ######################################################################################
    ####### With something pickled you don't need to read on the api all the time ########
    #
    # pickle_warnings(regions, date_from, date_to, pickle_warnings_file_name)
    # warnings = mp.unpickle_anything(pickle_warnings_file_name)
    # pickle_data_set(warnings, pickle_data_set_file_name, use_ikke_gitt=False)
    data_set = mp.unpickle_anything(pickle_data_set_file_name)
    #
    ######################################################################################


    # plot_all_histograms(data_set, date_from, date_to, warnings)

    # pickle_M3(data_set, 'matrixconfiguration.csv', pickle_m3_file_name)
    # m3_elements = mp.unpickle_anything(pickle_m3_file_name)
    # plot_m3(m3_elements, plot_m3_file_name, file_ext=".png")

    pickle_M3(data_set, 'matrixconfiguration.v2.csv', pickle_m3_v2_file_name)
    m3_v2_elements = mp.unpickle_anything(pickle_m3_v2_file_name)
    plot_m3_v2(m3_v2_elements, plot_m3_v2_file_name)

            problem_combined = fe.add_norwegian_letters(problem_combined)

            if (region != "") and (region != "Hemsedal Skisenter"):
                # add norwegian letters
                s = u'{0}\t{1}\t{2}\t{3}\t{4}\n'.format(
                    date,
                    region,
                    danger_level,
                    danger_level_name,
                    problem_combined)

                l.write(s.encode(use_encoding))
    l.close()


if __name__ == "__main__":

    # Input: pickled warnings produced by runmainmessage.py.
    # Output: two csv summaries of forecasted danger levels and problems.
    warnings_pickle = '{0}{1}'.format(se.local_storage, 'runForMainMessage warnings.pickle')
    danger_levels_csv = '{0}{1}'.format(se.output_folder, 'Alle varslede faregrader.csv')
    danger_and_problems_csv = '{0}{1}'.format(se.output_folder, 'Alle varslede faregrader og problemer.csv')

    # NOTE!! Warnings with problems found in pickle file for main messages. If update needed, use pickle_warnings
    # method in runmainmessage.py
    warnings = mp.unpickle_anything(warnings_pickle)

    # Write both csv files from the same warning set.
    save_danger_levels_to_file(warnings, danger_levels_csv)
    save_danger_and_problem_to_file(warnings, danger_and_problems_csv)

    a = 1  # debugger breakpoint anchor (kept from original)