# NOTE(review): this line is a whitespace-mangled paste of an `open_cp`
# "scripted" prediction example.  It contains, run together on one line:
#   * the tail of a pickle-based loader (`with bz2.open(...): return ...`)
#     whose `def` line is NOT visible in this chunk;
#   * a script that builds a 150x150 grid, masks it by the loaded geometry,
#     and queues Naive / RetroHotspot / RetroHotspotCts predictions over
#     daily windows from 2007-10-01 to 2008-01-01, sweeping bandwidths
#     50..300 step 10;
#   * the start of a ProHotspot sweep over (time_bandwidth, space_bandwidth)
#     pairs, TRUNCATED mid-call at `ClassicWeight(time_bandwidth=tb,`.
# Because the fragment is cut off at both ends, it is left byte-identical
# rather than reconstructed — TODO: recover the original file and restore
# proper line structure before editing this code.
with bz2.open("geo.pic.bz") as f: return pickle.load(f) import datetime, itertools grid = open_cp.data.Grid(xsize=150, ysize=150, xoffset=0, yoffset=0) grid = open_cp.geometry.mask_grid_by_intersection(load_geometry(), grid) with scripted.Data(load_points, load_geometry, start=datetime.datetime(2007, 1, 1), grid=grid) as state: time_range = scripted.TimeRange(datetime.datetime(2007, 10, 1), datetime.datetime(2008, 1, 1), datetime.timedelta(days=1)) state.add_prediction(scripted.NaiveProvider, time_range) for bw in range(50, 301, 10): weight = open_cp.retrohotspot.Quartic(bw) state.add_prediction(scripted.RetroHotspotProvider(weight), time_range) for bw in range(50, 301, 10): weight = open_cp.retrohotspot.Quartic(bw) state.add_prediction(scripted.RetroHotspotCtsProvider(weight), time_range) for tb, sb in itertools.product([30, 50, 70, 90], [3, 4, 5, 6, 7]): weight = open_cp.prohotspot.ClassicWeight(time_bandwidth=tb,
def load_geometry():
    """Load the study-area geometry.

    Reads the "SouthSide" file with geopandas and returns the first
    geometry it contains.
    """
    frame = gpd.read_file("SouthSide")
    return frame.geometry[0]


def stringToDatetime(s):
    """Parse a "YYYY-MM-DD" string into a :class:`datetime.datetime`.

    BUG FIX: the original passed the split *strings* directly to
    ``datetime.datetime(...)``, which raises ``TypeError`` because the
    constructor requires integer year/month/day arguments.  Each component
    is now converted to ``int`` first.
    """
    return datetime.datetime(*(int(part) for part in s.split("-")))


# Analysis window: training data starts at `earliest_time`; predictions are
# made daily over [start_time, end_time).
earliest_time = "2016-01-01"
start_time = "2016-10-01"
end_time = "2017-01-01"

# Perform the predictions; see `scripted_intro.md`
with scripted.Data(load_points, load_geometry,
                   start=stringToDatetime(earliest_time)) as state:
    # One prediction per day across the final quarter.
    time_range = scripted.TimeRange(stringToDatetime(start_time),
                                    stringToDatetime(end_time),
                                    datetime.timedelta(days=1))
    state.add_prediction(scripted.NaiveProvider, time_range)
    # Score with both evaluators and save each result set to CSV.
    state.score(scripted.HitRateEvaluator)
    state.score(scripted.HitCountEvaluator)
    state.process(scripted.HitRateSave("ratesTest.csv"))
    state.process(scripted.HitCountSave("countsTest.csv"))