def calc_hypertensive(location, draw):
    """Compute, per demographic group, the proportion of the population whose
    systolic blood pressure exceeds ``HYPERTENSION_THRESHOLD`` for one draw.

    Parameters
    ----------
    location
        Location whose HDF artifact (under ``HYPERTENSION_DATA_FOLDER``) is read.
    draw
        Draw number; used both to filter the artifact and to select the
        ``draw_{draw}`` column from the exposure data.

    Returns
    -------
    pd.Series
        Proportions above the threshold, indexed by the demographic index of
        the exposure data (with the ``parameter`` level dropped) and named
        ``draw_{draw}``.
    """
    art_path = HYPERTENSION_DATA_FOLDER / f'{location}/data.hdf'
    art = Artifact(str(art_path), filter_terms=[f'draw=={draw}'])
    # I can drop indices and know that the means/sds/weights will be aligned
    # b/c we sort the data in vivarium_inputs
    mean = art.load('risk_factor.high_systolic_blood_pressure.exposure')
    demographic_index = mean.index  # but we'll need it later for the proportions
    mean = mean.reset_index(drop=True)
    sd = art.load(
        'risk_factor.high_systolic_blood_pressure.exposure_standard_deviation'
    ).reset_index(drop=True)
    # these will be the same for all draws
    weights = prep_weights(art)
    threshold = pd.Series(HYPERTENSION_THRESHOLD, index=mean.index)
    dist = EnsembleDistribution(weights=weights,
                                mean=mean[f'draw_{draw}'],
                                sd=sd[f'draw_{draw}'])
    # We want the proportion above the threshold. NaNs from the cdf are
    # filled once here; the second fillna(0) after droplevel in the original
    # was redundant, since droplevel cannot introduce NaNs.
    props = (1 - dist.cdf(threshold)).fillna(0)
    props.index = demographic_index
    props.name = f'draw_{draw}'
    return props.droplevel('parameter')
def write_data(artifact: Artifact, key: EntityKey, data: pd.DataFrame):
    """Write ``data`` under ``key`` unless the artifact already holds it.

    Parameters
    ----------
    artifact
        The artifact to write to.
    key
        The entity key associated with the data to write.
    data
        The data to write.

    Returns
    -------
    The data as loaded back out of the artifact.
    """
    key_name = str(key)
    if key_name not in artifact:
        logger.debug(f'Writing data for {key} to artifact.')
        artifact.write(key_name, data)
    else:
        logger.debug(f'Data for {key} already in artifact. Skipping...')
    return artifact.load(key_name)
def load_and_write_data(artifact: Artifact, key: EntityKey, location: str):
    """Loads data and writes it to the artifact if not already present.

    Parameters
    ----------
    artifact
        The artifact to write to.
    key
        The entity key associated with the data to write.
    location
        The location associated with the data to load and the artifact to
        write to.

    Notes
    -----
    This function supports simple remapping of keys. Complex tailoring of
    input data should not use this function. To support appending, they
    should check for the write key in the artifact manually, then load any
    relevant data and transform it as necessary to write out using
    ``artifact.write``.
    """
    key_name = str(key)
    if key_name not in artifact:
        logger.debug(f'Loading data for {key} for location {location}.')
        data = loader.get_data(key, location)
        logger.debug(f'Writing data for {key} to artifact.')
        artifact.write(key_name, data)
    else:
        logger.debug(f'Data for {key} already in artifact. Skipping...')
    return artifact.load(key_name)
def load_and_write_data(artifact: Artifact, key: str, location: str, replace: bool):
    """Loads data and writes it to the artifact if not already present.

    Parameters
    ----------
    artifact
        The artifact to write to.
    key
        The entity key associated with the data to write.
    location
        The location associated with the data to load and the artifact to
        write to.
    replace
        Flag which determines whether or not to overwrite existing data

    Returns
    -------
    The data as loaded back out of the artifact.
    """
    # Fast path: data already present and caller did not ask for a refresh.
    if key in artifact and not replace:
        logger.debug(f'Data for {key} already in artifact. Skipping...')
        return artifact.load(key)

    logger.debug(f'Loading data for {key} for location {location}.')
    data = loader.get_data(key, location)
    if key in artifact:
        # key is in artifact, but should be replaced
        logger.debug(f'Replacing data for {key} in artifact.')
        artifact.replace(key, data)
    else:
        logger.debug(f'Writing data for {key} to artifact.')
        artifact.write(key, data)
    return artifact.load(key)
def write_data(artifact: Artifact, key: str, data: pd.DataFrame):
    """Writes data to the artifact if not already present.

    Parameters
    ----------
    artifact
        The artifact to write to.
    key
        The entity key associated with the data to write.
    data
        The data to write.

    Returns
    -------
    The data as loaded back out of the artifact.
    """
    if key not in artifact:
        logger.debug(f'Writing data for {key} to artifact.')
        artifact.write(key, data)
    else:
        logger.debug(f'Data for {key} already in artifact. Skipping...')
    return artifact.load(key)
def load_and_write_data(artifact: Artifact, key: str, location: str):
    """Loads data and writes it to the artifact if not already present.

    Parameters
    ----------
    artifact
        The artifact to write to.
    key
        The entity key associated with the data to write.
    location
        The location associated with the data to load and the artifact to
        write to.

    Returns
    -------
    The data as loaded back out of the artifact.
    """
    if key not in artifact:
        logger.debug(f'Loading data for {key} for location {location}.')
        data = loader.get_data(key, location)
        logger.debug(f'Writing data for {key} to artifact.')
        artifact.write(key, data)
    else:
        logger.debug(f'Data for {key} already in artifact. Skipping...')
    return artifact.load(key)