Example #1
0
def save_custom(meid):
    """Upload custom draws for *meid* to the epi database and mark them best.

    NOTE(review): this is a fill-in template, not runnable code — the
    {description}, {FILEPATH}, {YEAR IDS}, {SEX IDS} and {GBD ROUND}
    placeholders must be substituted first (as written, the bare
    ``[{YEAR IDS}]`` / ``[{SEX IDS}]`` lists are not valid Python syntax).
    Expects one HDF per measure/location named ``{measure_id}_{location_id}.h5``
    with a 'draws' table, rooted under ``/{FILEPATH}/<meid>``.
    """
    save_custom_results(meid=meid, description='{description}',
                        input_dir="/{FILEPATH}/"+str(meid), years=[{YEAR IDS}],
                        sexes=[{SEX IDS}], mark_best=True,
                        in_counts=False, env="prod",
                        custom_file_pattern="{measure_id}_{location_id}.h5",
                        h5_tablename='draws', gbd_round={GBD ROUND})
Example #2
0
def save_worker(meid, description, input_dir):
    """Upload the draws found in *input_dir* for *meid* to prod and mark best.

    Expects one HDF per location named '{location_id}.h5', each holding a
    'draws' table.
    """
    upload_kwargs = {
        'meid': meid,
        'description': description,
        'input_dir': input_dir,
        'env': 'prod',
        'mark_best': True,
        'custom_file_pattern': '{location_id}.h5',
        'h5_tablename': 'draws',
    }
    save_custom_results(**upload_kwargs)
# Command-line interface: the target me_id, its cause group, and the model
# versions that fed the squeeze-proportion calculation.
parser.add_argument("me_id", help="The me_id to upload", type=int)
parser.add_argument("cause_name",
                    help="The name of cause group for this upload")
parser.add_argument(
    "model_version_ids",
    help="The model versions used to calculate squeeze proportions")

args = parser.parse_args()

# GBD estimation years being uploaded.
year_ids = [1990, 1995, 2000, 2005, 2010, 2016]

# Record which input model versions produced these results.
description = "Used model_version_ids {}.".format(args.model_version_ids)

# Draws are laid out as <root>/<cause_name>/<me_id>/, one HDF per year.
upload_dir = os.path.join(
    "{root}/{proc}".format(root=root, proc=args.cause_name),
    str(args.me_id))

al.save_custom_results(meid=args.me_id,
                       description=description,
                       input_dir=upload_dir,
                       sexes=[1, 2],
                       birth_prev=True,
                       mark_best=True,
                       env='prod',
                       years=year_ids,
                       custom_file_pattern="{year_id}.h5",
                       h5_tablename="data")
Example #4
0
# this part uploads the results of the HIV split to the epi database
# NOTE(review): the hard-coded description below says "anemia casual
# attribution" (likely a typo for "causal"), which does not match this
# HIV-split comment — confirm which upload this script actually serves.

from adding_machine import agg_locations as al
import argparse

# CLI: the modelable-entity id to upload and the directory holding its draws.
parser = argparse.ArgumentParser()
parser.add_argument("me_id", help="The me_id to upload", type=int)
parser.add_argument("save_dir", help="upload directory", type=str)
args = parser.parse_args()
me_id = args.me_id
save_dir = args.save_dir

# NOTE(review): fill-in template — {YEAR IDS}, {SEX IDS} and {ENV} must be
# substituted before this is valid Python.
year_ids = [{YEAR IDS}]
description = 'Proportions calculated from anemia casual attribution'
sexes = [{SEX IDS}]
# Upload expects one HDF per year named '{year_id}.h5' with a "draws" table;
# the resulting model is marked best.
al.save_custom_results(meid=me_id, description=description,
                       input_dir=save_dir,
                       sexes=sexes, mark_best=True, env={ENV},
                       years=year_ids, custom_file_pattern='{year_id}.h5',
                       h5_tablename="draws")
    # Demographic scope for the estimate: most-detailed locations, the GBD
    # estimation years, age group 164, both sexes, measure 5.
    locations = get_most_detailed(location_set, gbd_round)
    years = [1990, 1995, 2000, 2005, 2010, 2016]
    ages = [164]
    sexes = [1, 2]
    measure = 5
    upload_me = 15803  # me_id that receives the mean-birth-weight results

    # grab the u_2500 birth prevalence
    prev_df = draws(source='dismod',
                    gbd_ids={"modelable_entity_ids": [me_id]},
                    location_ids=locations, year_ids=years,
                    age_group_ids=ages, sex_ids=sexes,
                    status='best', measure_ids=[measure],
                    gbd_round_id=gbd_round)
    prev_df = index_draws_by_demographics(prev_df)

    def mapping(x):
        # Map each prevalence draw through the fitted OLS prediction.
        y = predict_for_simple_ols(x, mean_parameters, cov_matrix)
        return y

    mean_weight_df = prev_df.applymap(mapping)
    mean_weight_df['age_group_id'] = 2
    save_to_hdf(mean_weight_df, savefile)
    # BUG FIX: the adjacent string literals had no separating space, so the
    # uploaded description read "...simple linearregression...".
    description = ('Estimate of mean birth weight from simple linear '
                   'regression. Units in grams')
    al.save_custom_results(meid=upload_me, description=description,
                           input_dir=output_dir,
                           sexes=sexes, mark_best=True, env='prod',
                           years=years, custom_file_pattern="all_draws.h5",
                           h5_tablename="draws")
Example #6
0
from adding_machine import agg_locations as al
import argparse

# CLI: me_id to upload, the model versions of its inputs, and the directory
# containing the draws.
parser = argparse.ArgumentParser()
parser.add_argument("me_id", help="The me_id to upload", type=int)
parser.add_argument(
    "model_version_ids",
    help="The model versions of the input me_ids used in this custom code",
    type=str)
parser.add_argument("out_dir", help="upload directory", type=str)
args = parser.parse_args()

upload_years = [1990, 1995, 2000, 2005, 2010, 2016]

# me_id 16535 gets the obstetric-fistula DisMod description and both sexes;
# every other me_id handled here is uploaded female-only.
if args.me_id == 16535:
    note = "obstetric fistula DisMod {}".format(args.model_version_ids)
    upload_sexes = [1, 2]
else:
    note = 'applied live births to incidence; applied duration'
    upload_sexes = [2]

al.save_custom_results(meid=args.me_id,
                       description=note,
                       input_dir=args.out_dir,
                       sexes=upload_sexes,
                       mark_best=True,
                       env='prod',
                       years=upload_years)
Example #7
0
    def upload_to_me(save_dir, in_path, cause_id, cause_name, me_id, write_hdf,
                     save_Results, send_Slack, token, channel):
        """Collate per-location heart-failure draws into one HDF and upload.

        Parameters
        ----------
        save_dir : directory where the combined HDF is written/uploaded from.
        in_path : directory containing per-location corrected prevalence CSVs.
        cause_id : cause id used to filter to the one etiology being saved.
        cause_name : human-readable name used in the upload description.
        me_id : modelable-entity id uploaded to.
        write_hdf, save_Results, send_Slack : "YES"/other flags gating the
            HDF-build, upload, and Slack-alert stages respectively.
        token, channel : Slack destination for missing-location alerts.
            NOTE(review): `token` is accepted but never used in this body.
        """

        # out file name
        out_file = "5_draws.h5"
        out_hdf = save_dir + out_file

        # table name
        h5_tablename = "draws"

        # get the time
        time_stamp = UploadToME.timestamp()

        # important columns
        save_cols = [
            'measure_id', 'location_id', 'year_id', 'age_group_id', 'sex_id'
        ]

        # list(range(...)) keeps identical Python 2 behavior while also being
        # valid on Python 3 (bare range(...) + [...] breaks there).
        years = list(range(1990, 2015, 5)) + [2016]
        sexes = [1, 2]

        # if the user asked to write an HDF for this etiology
        if write_hdf == "YES":
            # draw columns
            draw_cols = ['draw_%s' % i for i in range(0, 1000)]

            # get the locations
            locations_df = get_location_metadata(location_set_id=9)

            # filter out locations not used in Epi and non-admin0 locations
            DEMOGRAPHICS = list(
                get_demographics(gbd_team='epi')['location_ids'])
            LOCATIONS = locations_df.query(
                'location_id in {}'.format(DEMOGRAPHICS))[['location_id']]

            # location IDs
            LOCATION_IDS = LOCATIONS.location_id.unique()

            location_count = 0
            results_list = []
            for location_id in LOCATION_IDS:
                location_count += 1
                try:
                    df = pd.read_csv(
                        "{in_path}prevalence/corr_prev_draws_{location_id}.csv"
                        .format(in_path=in_path, location_id=location_id))
                    # filter to the one etiology being saved
                    temp = df.query('cause_id == {}'.format(cause_id)).copy()

                    # drop all unnecessary columns
                    temp = temp[save_cols + draw_cols]

                    # append the new df to the rest
                    results_list.append(temp)
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; Exception is the widest catch
                # that leaves the process interruptible.
                except Exception:
                    print("Missing location at {location_id} for {cause_id}".format(
                        location_id=location_id, cause_id=cause_id))
                    if send_Slack == "YES":
                        message = "Missing location at {}".format(location_id)
                        slack.chat.post_message(channel, message)

                # progress line printed every iteration (even after a miss)
                print("		results for {} locations appended.".format(
                    location_count))
            results = pd.concat(results_list)
            results.reset_index(inplace=True)
            results.drop('index', axis=1, inplace=True)

            # Make sure there aren't any negative values (This will happen
            # where Chagas is endemic)
            results[results < 0] = 0

            # save the results to a HDF
            results.to_hdf(out_hdf,
                           h5_tablename,
                           format='table',
                           data_columns=save_cols)

        # if the user asked to upload the HDF for this etiology
        if save_Results == "YES":
            description = ("heart failure due to {cause_name}; new upload "
                           "{time_stamp}; corrected-IHD proportions.").format(
                cause_name=cause_name, time_stamp=time_stamp)
            al.save_custom_results(me_id,
                                   description,
                                   save_dir,
                                   years=years,
                                   sexes=sexes,
                                   # NOTE(review): sibling scripts pass
                                   # mark_best=True; "best" is truthy so the
                                   # effect is likely identical — confirm
                                   # against the save_custom_results API.
                                   mark_best="best",
                                   in_counts=False,
                                   env="prod",
                                   custom_file_pattern=out_file,
                                   h5_tablename=h5_tablename,
                                   gbd_round=2016)