Example #1
    def _most_detailed_NE_injuries(self):
        nemat = self.injury_inputs.en_matrices
        nemat = nemat.merge(self.como_sim.ylds,
                            on=[
                                "sequela_id", "age_group_id", "location_id",
                                "year_id", "sex_id"
                            ])
        # multiply the paired draw columns (suffixed _x/_y by the merge above)
        nemat = nemat.join(
            pd.DataFrame(data=(nemat.filter(regex="draw.*x").values *
                               nemat.filter(regex="draw.*y").values),
                         index=nemat.index,
                         columns=self.draw_cols))
        nemat = common.apply_restrictions(self.como_version.cause_restrictions,
                                          nemat, self.dimensions.data_list())

        # write the restricted draws out as one csv per location/year/sex slice
        parallelism = ["location_id", "year_id", "sex_id"]
        for slices in self.dimensions.index_slices(parallelism):
            filename = "{yid}_{sid}.csv".format(yid=slices[1], sid=slices[2])
            directory = os.path.join("FILEPATH", str(slices[0]))
            try:
                os.makedirs(directory)
            except OSError as exception:
                if exception.errno != errno.EEXIST:
                    raise
            filepath = os.path.join(directory, filename)
            out_df = nemat.loc[(nemat.location_id == slices[0])
                               & (nemat.year_id == slices[1]) &
                               (nemat.sex_id == slices[2])]
            out_df.to_csv(filepath, index=False)

        # keep only the index and draw columns, then sum duplicate index rows
        nemat = nemat[self.index_cols + self.draw_cols]
        nemat = nemat.groupby(self.index_cols).sum().reset_index()
        return nemat[self.index_cols + self.draw_cols]
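The draw-by-draw product in this example works because the merge suffixes the overlapping draw columns with _x and _y, and filter(regex=...) then lines the two blocks up positionally. A self-contained sketch of that trick with made-up draw columns (not the actual COMO inputs) looks roughly like this:

import pandas as pd

draw_cols = ["draw_0", "draw_1", "draw_2"]

# two toy frames that share an id column and the same draw columns
left = pd.DataFrame({"sequela_id": [1, 2],
                     "draw_0": [0.1, 0.2], "draw_1": [0.3, 0.4], "draw_2": [0.5, 0.6]})
right = pd.DataFrame({"sequela_id": [1, 2],
                      "draw_0": [2.0, 3.0], "draw_1": [2.0, 3.0], "draw_2": [2.0, 3.0]})

# the merge renames the shared draw columns to draw_*_x and draw_*_y
merged = left.merge(right, on="sequela_id")

# element-wise product of the two draw blocks, written back under the
# original draw names (the same .filter(regex=...).values trick as above)
merged = merged.join(
    pd.DataFrame(data=(merged.filter(regex="draw.*x").values *
                       merged.filter(regex="draw.*y").values),
                 index=merged.index,
                 columns=draw_cols))
print(merged[["sequela_id"] + draw_cols])

Writing the product back under the original draw names via join keeps the row index aligned without a second merge; the same pattern appears again in Examples #6 and #7.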
Example #2
    def _get_sequela_measure(self, modelable_entity_reader, measure_id):

        if self.dimensions is None or self.como_version is None:
            raise AttributeError(
                "cannot set sequela via this method if como_version is None or"
                " dimensions is None")

        df_list = []
        for key in self.sequela_set:
            maybe_df = modelable_entity_reader.reader_results[key]
            try:
                df = maybe_df[maybe_df.measure_id == measure_id]
            except AttributeError:
                # results that are not DataFrames are treated as errors and re-raised
                raise maybe_df

            df_list.append(df)
        df = pd.concat(df_list)

        # attach sequela ids
        df = df.merge(self.como_version.sequela_list,
                      on='modelable_entity_id',
                      how='left')

        # restrict
        df = apply_restrictions(self.como_version.cause_restrictions, df,
                                self.dimensions.data_list())
        df = df[['sequela_id', 'cause_id', 'healthstate_id'] +
                self.dimensions.index_names + self.dimensions.data_list()]

        return df
Example #3
def attach_sequela_metadata_and_restict(df, ds):
    if df.empty:
        return df

    sequela_list = ds.params["sequela_list"]
    dimensions = ds.params["dimensions"]
    cause_restrictions = ds.params["cause_restrictions"]
    df = df.merge(sequela_list, on='modelable_entity_id', how='left')
    df = apply_restrictions(cause_restrictions, df, dimensions.data_list())
    return df[['sequela_id', 'cause_id', 'healthstate_id'] +
              dimensions.index_names + dimensions.data_list()]
Example #4
def attach_sexual_violence_metadata_and_restrict(df, ds):
    if df.empty:
        return df

    # cause_id 941 - Sexual violence
    df["cause_id"] = 941
    dimensions = ds.params["dimensions"]
    cause_restrictions = ds.params["cause_restrictions"]
    df = apply_restrictions(cause_restrictions, df, dimensions.data_list())
    df = df[['cause_id'] + dimensions.index_names + dimensions.data_list()]
    df = df.groupby(['cause_id'] + dimensions.index_names).sum()
    return df.reset_index()
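The assignment of a fixed cause_id followed by groupby/sum collapses the remaining sequela-level rows into one row per demographic. A minimal illustration of that collapse, using hypothetical index columns in place of dimensions.index_names, could be:

import pandas as pd

index_names = ["location_id", "year_id", "sex_id", "age_group_id"]
draw_cols = ["draw_0", "draw_1"]

# two toy rows for the same demographic (e.g. two contributing sequelae)
df = pd.DataFrame({"location_id": [101, 101], "year_id": [2019, 2019],
                   "sex_id": [2, 2], "age_group_id": [10, 10],
                   "draw_0": [0.01, 0.02], "draw_1": [0.02, 0.03]})

df["cause_id"] = 941
df = df[["cause_id"] + index_names + draw_cols]
df = df.groupby(["cause_id"] + index_names).sum()
print(df.reset_index())  # one row per demographic, draws summed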
Example #5
def attach_injuries_metadata_and_restict(df, ds):
    if df.empty:
        return df

    injury_sequela = ds.params["injury_sequela"]
    dimensions = ds.params["dimensions"]
    cause_restrictions = ds.params["cause_restrictions"]
    df = df.merge(injury_sequela, on='modelable_entity_id', how='left')
    df = apply_restrictions(cause_restrictions, df, dimensions.data_list())
    return df[[
        "modelable_entity_id", "cause_id", "rei_id", "sequela_id",
        "healthstate_id"
    ] + dimensions.index_names + dimensions.data_list()]
Example #6
    def _NE_ylds(self):
        nemat = self.injury_inputs.en_matrices
        nemat = nemat.merge(self.como_sim.ylds,
                            on=[
                                "sequela_id", "age_group_id", "location_id",
                                "year_id", "sex_id"
                            ])
        nemat = nemat.join(
            pd.DataFrame(data=(nemat.filter(regex="draw.*x").values *
                               nemat.filter(regex="draw.*y").values),
                         index=nemat.index,
                         columns=self.draw_cols))
        nemat = apply_restrictions(self.como_version.cause_restrictions, nemat,
                                   self.dimensions.data_list())
        nemat = nemat.merge(self.como_version.injury_dws_by_sequela)
        nemat = nemat.merge(self.como_version.ncode_hierarchy,
                            left_on="n_code",
                            right_on="rei")
        return nemat[self.index_cols + self.draw_cols]
Example #7
    def _compute_en_ylds(self, simulated_ylds, long_term_en_prev,
                         short_term_en_ylds):
        # compute the en matrix
        sim_dim = self.dimensions.get_simulation_dimensions(
            measures.PREVALENCE)
        denom = self._prepare_ncode_aggregates(long_term_en_prev.copy())
        denom = denom[sim_dim.index_names + ["sequela_id"] +
                      sim_dim.data_list()]
        numer = long_term_en_prev[sim_dim.index_names +
                                  ["cause_id", "sequela_id"] +
                                  sim_dim.data_list()]
        en_mat = broadcast(broadcast_onto_df=numer,
                           broadcast_df=denom,
                           index_cols=sim_dim.index_names + ["sequela_id"],
                           operator="/")
        en_mat.fillna(0, inplace=True)
        en_mat["measure_id"] = measures.YLD

        # apply the matrix to the ylds
        en_mat = en_mat.merge(simulated_ylds,
                              on=sim_dim.index_names + ["sequela_id"])
        en_mat = en_mat.join(
            pd.DataFrame(data=(en_mat.filter(regex="draw.*x").values *
                               en_mat.filter(regex="draw.*y").values),
                         index=en_mat.index,
                         columns=sim_dim.data_list()))
        en_mat = apply_restrictions(self.como_version.cause_restrictions,
                                    en_mat, sim_dim.data_list())

        # attach rei ids and align columns with the short term ylds
        rei_map = self.como_version.injury_sequela[[
            "cause_id", "sequela_id", "rei_id"
        ]]
        en_mat = en_mat.merge(rei_map, on=["sequela_id", "cause_id"])
        en_mat = en_mat[sim_dim.index_names + ["cause_id", "rei_id"] +
                        sim_dim.data_list()]
        en_mat.fillna(0, inplace=True)

        # compute the combined ylds
        en_ylds = pd.concat([en_mat, short_term_en_ylds])
        en_ylds = en_ylds.groupby(sim_dim.index_names + ["cause_id", "rei_id"])
        en_ylds = en_ylds.sum()[sim_dim.data_list()].reset_index()
        return en_ylds
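broadcast(..., operator="/") is a COMO helper not shown here; judging from this call, it divides each cause-specific numerator row by the sequela-level denominator draws so the E-N matrix holds each cause's share of prevalence. A rough, purely illustrative pandas approximation of just that division step (toy columns, not the real helper or dimensions object) might be:

import pandas as pd

index_cols = ["location_id", "sequela_id"]
draw_cols = ["draw_0", "draw_1"]

# cause-specific prevalence (numerator) and its sequela-level total (denominator)
numer = pd.DataFrame({"location_id": [1, 1], "sequela_id": [10, 10],
                      "cause_id": [100, 200],
                      "draw_0": [0.2, 0.6], "draw_1": [0.1, 0.3]})
denom = pd.DataFrame({"location_id": [1], "sequela_id": [10],
                      "draw_0": [0.8], "draw_1": [0.4]})

# merge the denominator onto each numerator row and divide draw-wise
merged = numer.merge(denom, on=index_cols, suffixes=("", "_denom"))
for col in draw_cols:
    merged[col] = merged[col] / merged[col + "_denom"]

en_mat = merged[index_cols + ["cause_id"] + draw_cols].fillna(0)
print(en_mat)  # each cause's fraction of the sequela-level draws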
Example #8
    def get_short_term_EN_ylds(self):
        if self.dimensions is None or self.como_version is None:
            raise AttributeError(
                "cannot get en ylds via this method if como_version is None or"
                " dimensions is None")
        en_dimensions = deepcopy(self.dimensions)
        en_dimensions.index_dim.replace_level("measure_id", 3)
        en_dimensions.index_dim.add_level(
            "cause_id",
            self.como_version.cause_restrictions.cause_id.unique().tolist())
        en_dimensions.index_dim.add_level(
            "rei_id",
            self.como_version.ncode_hierarchy.rei_id.unique().tolist())

        estim_df = self._get_short_term_EN_estimation(en_dimensions)
        annual_df = self._get_short_term_EN_annual(en_dimensions)
        df = estim_df.append(annual_df)
        df = apply_restrictions(self.como_version.cause_restrictions, df,
                                self.dimensions.data_list())
        self.short_term_en_ylds = df
Example #9
    def set_ecode_prevalence_via_me_reader(self, modelable_entity_reader):
        df = self._get_ecode_measure(modelable_entity_reader, 5)
        df = apply_restrictions(self.como_version.cause_restrictions, df,
                                self.dimensions.data_list())
        self.ecode_prevalence = df
Example #10
    def set_short_term_ecode_incidence_via_me_reader(self,
                                                     modelable_entity_reader):
        df = self._get_short_term_ecode_measure(modelable_entity_reader, 6)
        df = apply_restrictions(self.como_version.cause_restrictions, df,
                                self.dimensions.data_list())
        self.short_term_ecode_incidence = df
Example #11
    def set_short_term_ecode_ylds_via_me_reader(self, modelable_entity_reader):
        df = self._get_short_term_ecode_measure(modelable_entity_reader, 3)
        df = transform_metric(df, 3, 1)
        df = apply_restrictions(self.como_version.cause_restrictions, df,
                                self.dimensions.data_list())
        self.short_term_ecode_ylds = df