def read_concerns(fp, id_col):
    """Turn a JSON file of spatial-only features into pandas DataFrames.

    Parameters
    ----------
    fp : str
        Path to the JSON file of features.
    id_col :
        Unused; kept for interface compatibility with existing callers.
        TODO(review): confirm whether any caller still needs it.

    Returns
    -------
    list of (source, pandas.DataFrame) tuples — one per distinct 'source'
    value in the input; each frame has a single column (named after the
    source) of per-segment counts, indexed by segment key.
    """
    # Context manager closes the handle; the original leaked it via
    # json.load(open(fp)).
    with open(fp) as f:
        items = json.load(f)

    grouped_by_source = group_json_by_field(items, 'source')
    data_frames = []
    # Use a distinct loop name so the outer `items` is not shadowed.
    for source, source_items in grouped_by_source.items():
        # group_json_by_location returns (results, grouped); the first
        # element was immediately overwritten in the original, so only
        # the per-location `grouped` mapping is kept here.
        _, grouped = group_json_by_location(source_items)
        # Drop falsy keys (e.g. features with no segment match).
        segments = [k for k in grouped if k]
        counts = {source: [grouped[k]['count'] for k in segments]}
        data_frames.append((source, pd.DataFrame(counts, index=segments)))
    return data_frames
# util.find_nearest needs. Eventually this should be cleaned up inproj = pyproj.Proj(init='epsg:4326') outproj = pyproj.Proj(init='epsg:3857') re_point = pyproj.transform(inproj, outproj, address[2], address[1]) point = Point(re_point) record = [{'point': point, 'properties': {}}] util.find_nearest(record, combined_seg, segments_index, 20) if record[0]['properties']['near_id']: near_id = record[0]['properties']['near_id'] crashes, crash_data = util.group_json_by_location( crash_items ) #years=[2015, 2016, 2017], yearfield='Date Time') import ipdb ipdb.set_trace() if str(near_id) in list(crash_data.keys()): print( str(crash_data[str(near_id)]['count']) + " crashes found") elif args.date: print(parse(args.date)) results = [ crash for crash in crash_items if parse(crash['dateOccurred']).date() == parse(args.date).date() ]