Example #1
import os

from zoltpy import util
from zoltpy.covid19 import (COVID19_TARGET_NAMES, covid19_row_validator,
                            validate_quantile_csv_file)
from zoltpy.quantile_io import json_io_dict_from_quantile_csv_file

# Note: the imports above follow the zoltpy layout used by the COVID-19
# Forecast Hub upload scripts; metadata_dict_for_file is assumed to be a
# helper defined elsewhere in the same script.


def upload_covid_all_forecasts(path_to_processed_model_forecasts):
    # meta info
    project_name = 'COVID-19 Forecasts'
    project_obj = None
    project_timezeros = []
    forecasts = os.listdir(path_to_processed_model_forecasts)
    conn = util.authenticate()

    # Get all existing timezeros in the project
    for project in conn.projects:
        if project.name == project_name:
            project_obj = project
            for timezero in project.timezeros:
                project_timezeros.append(timezero.timezero_date)
            break

    # Derive the '<team>-<model>' part of the file name from a forecast CSV,
    # then look up the model via its metadata
    separator = '-'
    first_csv = next(f for f in forecasts if f.endswith('.csv'))
    dir_name = separator.join(
        first_csv.split(separator)[3:]).split('.csv')[0]
    metadata = metadata_dict_for_file(path_to_processed_model_forecasts +
                                      'metadata-' + dir_name + '.txt')
    model_name = metadata['model_name']
    model = [
        model for model in project_obj.models if model.name == model_name
    ][0]

    # Get names of existing forecasts to avoid re-upload
    existing_forecasts = [forecast.source for forecast in model.forecasts]

    for forecast in forecasts:

        # Skip if forecast is already on zoltar
        if forecast in existing_forecasts:
            continue

        # Skip metadata text file
        if '.txt' in forecast:
            continue

        with open(path_to_processed_model_forecasts + forecast) as fp:

            # Get the timezero and create it on Zoltar if it does not exist yet
            time_zero_date = forecast.split(dir_name)[0][:-1]
            if time_zero_date not in project_timezeros:
                try:
                    project_obj.create_timezero(time_zero_date)
                except Exception as ex:
                    print(ex)

            # Validate covid19 file
            errors_from_validation = validate_quantile_csv_file(
                path_to_processed_model_forecasts + forecast)

            # Upload forecast
            if "no errors" == errors_from_validation:
                quantile_json, error_from_transformation = json_io_dict_from_quantile_csv_file(
                    fp, COVID19_TARGET_NAMES, covid19_row_validator)
                if len(error_from_transformation) > 0:
                    print(error_from_transformation)
                else:
                    try:
                        util.upload_forecast(conn,
                                             quantile_json,
                                             forecast,
                                             project_name,
                                             model_name,
                                             time_zero_date,
                                             overwrite=False)
                    except Exception as ex:
                        print(ex)
            else:
                print(errors_from_validation)
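
A minimal usage sketch for the function above, assuming the Forecast Hub style layout data-processed/<team-model>/ and that util.authenticate() can read the Zoltar credentials from the environment; the base_dir path and the __main__ wrapper are illustrative assumptions, not part of the original script.

# Usage sketch (assumptions noted above)
if __name__ == '__main__':
    base_dir = './data-processed/'
    for model_dir in sorted(os.listdir(base_dir)):
        # Trailing slash is required because the function joins paths with '+'
        full_path = os.path.join(base_dir, model_dir) + '/'
        if os.path.isdir(full_path):
            upload_covid_all_forecasts(full_path)
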
Example #2
import glob
import hashlib
import json
import logging
import pickle
from pathlib import Path

import yaml
from zoltpy import util

logging.basicConfig(level=logging.DEBUG)

cwd_p = Path(__file__).parent.resolve()
# recursive=True is needed for '**' to descend into nested directories
all_forecasts = glob.glob('./data-processed/**/*-*.csv', recursive=True)
# pprint.pprint(all_forecasts)
# meta info
project_name = 'COVID-19 Forecasts'
project_obj = None
project_timezeros = []
conn = util.authenticate()
url = 'https://github.com/midas-network/covid19-scenario-modeling-hub/tree/master/data-processed/'

# all_forecasts = glob.glob('./data')
project_obj = [
    project for project in conn.projects if project.name == project_name
][0]
project_timezeros = [
    timezero.timezero_date for timezero in project_obj.timezeros
]
models = [model for model in project_obj.models]
model_abbrs = [model.abbreviation for model in models]
zoltar_forecasts = []
repo_forecasts = []
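
The fragment stops after initializing the two lists above; the continuation below is a hedged sketch, assuming the intent is to compare the forecast file names in the repository against the forecast sources already uploaded to Zoltar.

# Sketch only: collect forecast source names already on Zoltar
for model in models:
    for forecast in model.forecasts:
        zoltar_forecasts.append(forecast.source)

# Sketch only: collect forecast file names present in the repository
for path in all_forecasts:
    repo_forecasts.append(Path(path).name)

# Forecasts present in the repo but not yet on Zoltar (assumed comparison key:
# the CSV file name, which Zoltar stores as forecast.source)
missing_on_zoltar = sorted(set(repo_forecasts) - set(zoltar_forecasts))
logging.debug('%d forecasts not yet on Zoltar', len(missing_on_zoltar))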