Example #1
0
def main():
    """Build the static site: parse arguments, collect matching regions,
    then render one page per region using a process pool."""
    parser = argparse.ArgumentParser(
        parents=[cache_policy.argument_parser, build_world.argument_parser]
    )
    parser.add_argument("--processes", type=int)
    parser.add_argument("--chunk_size", type=int)
    parser.add_argument(
        "--site_dir", type=pathlib.Path, default=pathlib.Path("site_out")
    )
    parser.add_argument("--region_regex")
    args = parser.parse_args()
    make_map.setup(args)

    world = build_world.get_world(
        session=cache_policy.new_session(args), args=args
    )

    def get_regions(r):
        # Yield r itself (if it matches the filter) followed by every
        # matching descendant, depth-first.
        if r.matches_regex(args.region_regex):
            yield r
        yield from (a for s in r.subregions.values() for a in get_regions(s))

    all_regions = list(get_regions(world))

    print(f"Generating {len(all_regions)} pages in {args.site_dir}...")
    style.write_style_files(args.site_dir)

    # Process regions using multiple cores.
    processes = args.processes or os.cpu_count() * 2
    chunk_size = args.chunk_size or max(1, len(all_regions) // (4 * processes))
    # Bug fix: pass the computed `processes` fallback (2x CPU count), not the
    # raw args.processes — that could be None, which made Pool silently fall
    # back to its own default and left the line above as dead code.
    with multiprocessing.Pool(processes=processes) as pool:
        pool.starmap(
            make_region_page,
            ((r, args) for r in all_regions),
            chunksize=chunk_size,
        )
                name = f"Plant {tp_id}"
                name += f" ({' '.join(details)})" if details else ""
                name += f": {row.population_served:,.0f}p"

                w.index = w.index + 0.5 * (w.date_end - w.index)
                ww_metrics[name] = make_metric(
                    c=matplotlib.cm.tab20b.colors[(12 + len(ww_metrics)) % 20],
                    em=1 if row.population_served > 0.25 * pop else 0,
                    ord=1.0,
                    cred=cdc_credits,
                    v=((1 + (w.ptc_15d / 100))**(1 / 15) - 1) * 100,
                )


if __name__ == "__main__":
    import argparse

    from covid import build_atlas
    from covid import cache_policy
    from covid import logging_policy  # noqa

    # Command-line entry point: build the atlas, attach metrics, and dump
    # the resulting region tree for inspection.
    arg_parser = argparse.ArgumentParser(parents=[cache_policy.argument_parser])
    arg_parser.add_argument("--print_data", action="store_true")

    cli_args = arg_parser.parse_args()
    http_session = cache_policy.new_session(cli_args)
    world_atlas = build_atlas.get_atlas(http_session)
    add_metrics(session=http_session, atlas=world_atlas)
    print(world_atlas.world.debug_tree(with_data=cli_args.print_data))
Example #3
0
def credits():
    """Return the source-attribution mapping (URL -> display name)."""
    source_url = "https://tinyurl.com/statepolicies"
    return {source_url: "COVID-19 US State Policy Database"}


if __name__ == "__main__":
    import argparse
    import textwrap

    from covid import cache_policy

    parser = argparse.ArgumentParser(parents=[cache_policy.argument_parser])
    events = get_events(session=cache_policy.new_session(parser.parse_args()))
    for state_fips, state_events in events.groupby(level="state_fips"):
        state = us.states.lookup(f"{state_fips:02d}")
        print(f"{state.name}:")
        for date, date_events in state_events.groupby(level="date"):
            print(date.strftime("  %Y-%m-%d"))
            for area, area_events in date_events.groupby("policy_area"):
                print(f"    {area}")
                for e in area_events.itertuples():
                    s = ["⬇️ ", "🔷", "🔹", "▪️ ", "🔸", "🔶", "⏫"][e.score + 3]
                    text = " ".join(x for x in [s, e.emoji, e.policy] if x)
                    print(
                        textwrap.TextWrapper(
                            initial_indent="     ",
                            subsequent_indent="         ",
                            width=79,
Example #4
0

# Module-level AddFIPS instance, constructed once and shared by all lookups.
_add_fips = addfips.AddFIPS()


def _fips_from_county(county):
    """Return the integer FIPS code for a California county name.

    Raises:
        ValueError: if the lookup fails (unknown county, bad data, etc.).
    """
    try:
        return int(_add_fips.get_county_fips(county, "CA"))
    except Exception as e:
        # Catch Exception, not BaseException: the old clause also swallowed
        # KeyboardInterrupt/SystemExit. Chain the cause so the original
        # failure is preserved in the traceback.
        raise ValueError(f'Error looking up county "{county}"') from e


def credits():
    """Return the source-attribution mapping (URL -> display name)."""
    attribution = {OVERVIEW_URL: "California Blueprint Data Chart"}
    return attribution


if __name__ == "__main__":
    import argparse

    from covid import cache_policy

    # Standalone smoke test: fetch county tier data and print each county's
    # full tier history.
    arg_parser = argparse.ArgumentParser(parents=[cache_policy.argument_parser])
    http_session = cache_policy.new_session(arg_parser.parse_args())
    county_map = get_counties(http_session)
    for fips_code, rec in county_map.items():
        assert fips_code == rec.fips
        print(rec.fips, rec.name)
        for day, tier in rec.tier_history.items():
            print(f'    {day.strftime("%Y-%m-%d")} '
                  f"{tier.number}: {tier.emoji} {tier.color} ({tier.name})")
Example #5
0
    data.census_fips_code = data.census_fips_code.astype(int)

    # Fill in missing state-level FIPS codes.
    for state in us.states.STATES_AND_TERRITORIES:
        mask = data.iso_3166_2_code.eq(f"US-{state.abbr}")
        data.census_fips_code.mask(mask, int(state.fips), inplace=True)

    return data


def credits():
    """Return the source-attribution mapping (URL -> display name)."""
    source_name = "Google Community Mobility Reports"
    return {"https://www.google.com/covid19/mobility/": source_name}


if __name__ == "__main__":
    import argparse

    from covid import cache_policy

    # Standalone smoke test: load the mobility dataset and print a summary
    # plus a couple of sample rows.
    arg_parser = argparse.ArgumentParser(parents=[cache_policy.argument_parser])
    mobility = get_mobility(cache_policy.new_session(arg_parser.parse_args()))
    mobility.info()
    print()
    print("Arbitrary record:")
    print(mobility.iloc[len(mobility) // 2])
    print()
    print("Last California record:")
    print(mobility[mobility.census_fips_code.eq(6)].iloc[-1])