def _visualize_grid(self):
    """Convert the .geo/.grid file pair into a GeoJSON FeatureCollection and save it."""
    geo_file = pd.read_csv(self.geo_path, index_col=None)
    grid_file = pd.read_csv(self.grid_path, index_col=None)
    geojson_obj = {'type': "FeatureCollection", 'features': []}

    # collect the non-reserved columns, which become feature properties
    geo_feature_lst = [_ for _ in list(geo_file.columns) if _ not in self.geo_reserved_lst]
    grid_feature_lst = [_ for _ in list(grid_file.columns) if _ not in self.grid_reserved_lst]

    for _, row in geo_file.iterrows():
        # build the property dictionary for this grid cell
        row_id, column_id = row['row_id'], row['column_id']
        feature_dct = row[geo_feature_lst].to_dict()
        dyna_i = grid_file[(grid_file['row_id'] == row_id) & (grid_file['column_id'] == column_id)]
        for f in grid_feature_lst:
            feature_dct[f] = float(dyna_i[f].mean())

        # form a GeoJSON feature
        feature_i = dict()
        feature_i['type'] = 'Feature'
        feature_i['properties'] = feature_dct
        feature_i['geometry'] = {}
        feature_i['geometry']['type'] = row['type']
        feature_i['geometry']['coordinates'] = eval(row['coordinates'])
        geojson_obj['features'].append(feature_i)

    ensure_dir(self.save_path)
    save_name = "_".join(self.grid_path.split('/')[-1].split('.')) + '.json'
    print(f"visualization file saved at {save_name}")
    with open(self.save_path + '/' + save_name, 'w', encoding='utf-8') as fp:
        json.dump(geojson_obj, fp, ensure_ascii=False, indent=4)
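
# Hedged sketch (not part of the library): the toy frames and the helper name
# _demo_grid_join below are illustrative only. It reproduces the core step of
# _visualize_grid: for each .geo cell, the matching .grid rows are selected by
# (row_id, column_id) and every non-reserved column is averaged into a single
# property value for that cell's GeoJSON feature.
def _demo_grid_join():
    import pandas as pd
    geo_toy = pd.DataFrame({'row_id': [0], 'column_id': [0]})
    grid_toy = pd.DataFrame({'row_id': [0, 0, 1], 'column_id': [0, 0, 0],
                             'flow': [10.0, 20.0, 99.0]})
    row = geo_toy.iloc[0]
    cell = grid_toy[(grid_toy['row_id'] == row['row_id']) &
                    (grid_toy['column_id'] == row['column_id'])]
    # only the two records of cell (0, 0) contribute; the averaged value is what
    # _visualize_grid writes into feature['properties']['flow']
    assert float(cell['flow'].mean()) == 15.0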
def _visualize_geo(self):
    """Convert the .geo file into a GeoJSON FeatureCollection and save it."""
    geo_file = pd.read_csv(self.geo_path, index_col=None)
    geojson_obj = {'type': "FeatureCollection", 'features': []}

    # non-reserved columns become feature properties
    extra_feature = [_ for _ in list(geo_file.columns) if _ not in self.geo_reserved_lst]

    for _, row in geo_file.iterrows():
        feature_dct = row[extra_feature].to_dict()
        feature_i = dict()
        feature_i['type'] = 'Feature'
        feature_i['properties'] = feature_dct
        feature_i['geometry'] = {}
        feature_i['geometry']['type'] = row['type']
        feature_i['geometry']['coordinates'] = eval(row['coordinates'])
        geojson_obj['features'].append(feature_i)

    ensure_dir(self.save_path)
    save_name = "_".join(self.geo_path.split('/')[-1].split('.')) + '.json'
    print(f"visualization file saved at {save_name}")
    with open(self.save_path + '/' + save_name, 'w', encoding='utf-8') as fp:
        json.dump(geojson_obj, fp, ensure_ascii=False, indent=4)
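
# Hedged usage sketch: _inspect_geojson is a hypothetical helper, not a library
# API. Because _visualize_geo writes plain GeoJSON, the output can be checked
# with the standard json module (or loaded into any GeoJSON-aware tool) without
# going back through the visualizer.
def _inspect_geojson(path):
    import json
    with open(path, 'r', encoding='utf-8') as f:
        obj = json.load(f)
    # one line per feature: geometry type plus the property keys that came from
    # the non-reserved .geo columns
    for feature in obj['features']:
        print(feature['geometry']['type'], sorted(feature['properties'].keys()))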
def __init__(self, config):
    """Load and cache the atom files (.geo/.rel/.usr/.dyna and truth .dyna) for map matching."""
    # config and dataset name
    self.config = config
    self.dataset = self.config.get('dataset', '')

    # logger
    self._logger = getLogger()

    # features
    self.with_time = config.get('with_time', True)  # whether the input trajectory data includes time
    self.delta_time = config.get('delta_time', True)  # True: time deltas in seconds; False: datetime.datetime timestamps
    self.with_rd_speed = ('speed' in config['rel']['geo'].keys())

    # cache
    self.cache_dataset = self.config.get('cache_dataset', True)
    self.parameters_str = str(self.dataset) + '_' + str(self.delta_time)
    self.cache_file_name = os.path.join('./libcity/cache/dataset_cache/',
                                        'map_matching_{}.pkl'.format(self.parameters_str))
    self.cache_file_folder = './libcity/cache/dataset_cache/'
    ensure_dir(self.cache_file_folder)

    # ensure dataset
    self.data_path = './raw_data/' + self.dataset + '/'
    if not os.path.exists(self.data_path):
        raise ValueError("Dataset {} does not exist! Please ensure the path "
                         "'./raw_data/{}/' exists!".format(self.dataset, self.dataset))

    # related file names
    self.geo_file = self.config.get('geo_file', self.dataset)
    self.rel_file = self.config.get('rel_file', self.dataset)
    self.dyna_file = self.config.get('dyna_file', self.dataset)
    self.usr_file = self.config.get('usr_file', self.dataset)
    self.truth_file = self.config.get('truth_file', self.dataset + '_truth')

    # result
    self.trajectory = None
    self.rd_nwk = None
    self.route = None

    # load the 5 files unless a cached result can be reused
    if not self.cache_dataset or not os.path.exists(self.cache_file_name):
        if os.path.exists(self.data_path + self.rel_file + '.rel'):
            if os.path.exists(self.data_path + self.geo_file + '.geo'):
                self._load_geo_and_rel()
            else:
                raise ValueError('.geo file not found!')
        else:
            raise ValueError('.rel file not found!')
        if os.path.exists(self.data_path + self.usr_file + '.usr'):
            self._load_usr()
        else:
            raise ValueError('.usr file not found!')
        if os.path.exists(self.data_path + self.dyna_file + '.dyna'):
            self._load_dyna()
        else:
            raise ValueError('.dyna file not found!')
        if os.path.exists(self.data_path + self.truth_file + '.dyna'):
            self._load_truth_dyna()
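
# Hedged example of the configuration keys read by __init__ above; the concrete
# values (including the 'Seattle' dataset name and the 'num' column type) are
# illustrative assumptions, not shipped defaults. 'rel' mirrors the atom-file
# schema section of the config, so with_rd_speed is simply derived from whether
# 'speed' appears under rel['geo'].
_EXAMPLE_MAP_MATCHING_CONFIG = {
    'dataset': 'Seattle',              # expects ./raw_data/Seattle/ to exist
    'with_time': True,                 # trajectories carry timestamps
    'delta_time': True,                # True: offsets in seconds; False: datetime objects
    'cache_dataset': True,             # reuse ./libcity/cache/dataset_cache/map_matching_*.pkl
    'rel': {'geo': {'speed': 'num'}},  # presence of 'speed' enables with_rd_speed
}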
def _visualize_dyna(self):
    """Convert the .dyna file into GeoJSON LineString trajectories and save them."""
    dyna_file = pd.read_csv(self.dyna_path, index_col=None)
    dyna_feature_lst = [_ for _ in list(dyna_file.columns) if _ not in self.dyna_reserved_lst]
    geojson_obj = {'type': "FeatureCollection", 'features': []}
    trajectory = {}

    # GPS trajectories carry raw coordinates; otherwise rows reference .geo locations
    GPS_traj = "coordinates" in dyna_file.columns
    if not GPS_traj:
        geo_file = pd.read_csv(self.geo_path, index_col=None)

    grouped = dyna_file.groupby("entity_id")
    for entity_id, entity_value in grouped:
        if "traj_id" in dyna_file.columns:
            # one LineString feature per (entity_id, traj_id) pair
            trajectory[entity_id] = {}
            entity_value = entity_value.groupby("traj_id")
            for traj_id, traj_value in entity_value:
                feature_dct = {"usr_id": entity_id, "traj_id": traj_id}
                for f in dyna_feature_lst:
                    feature_dct[f] = float(traj_value[f].mean())
                feature_i = dict()
                feature_i['type'] = 'Feature'
                feature_i['properties'] = feature_dct
                feature_i['geometry'] = {}
                feature_i['geometry']['type'] = "LineString"
                feature_i['geometry']['coordinates'] = []
                if GPS_traj:
                    for _, row in traj_value.iterrows():
                        feature_i['geometry']['coordinates'].append(eval(row['coordinates']))
                else:
                    for _, row in traj_value.iterrows():
                        coor = eval(geo_file.loc[row['location']]['coordinates'])
                        if _ == 0:
                            feature_i['geometry']['coordinates'].append(coor[0])
                        feature_i['geometry']['coordinates'].append(coor[1])
                geojson_obj['features'].append(feature_i)
        else:
            # no traj_id column: one LineString feature per entity
            feature_dct = {"usr_id": entity_id}
            feature_i = dict()
            feature_i['type'] = 'Feature'
            feature_i['properties'] = feature_dct
            feature_i['geometry'] = {}
            feature_i['geometry']['type'] = "LineString"
            feature_i['geometry']['coordinates'] = []
            if GPS_traj:
                for _, row in entity_value.iterrows():
                    feature_i['geometry']['coordinates'].append(eval(row['coordinates']))
            else:
                for _, row in entity_value.iterrows():
                    coor = eval(geo_file.loc[row['location']]['coordinates'])
                    if _ == 0:
                        feature_i['geometry']['coordinates'].append(coor[0])
                    feature_i['geometry']['coordinates'].append(coor[1])
            geojson_obj['features'].append(feature_i)

    ensure_dir(self.save_path)
    save_name = "_".join(self.dyna_path.split('/')[-1].split('.')) + '.json'
    print(f"visualization file saved at {save_name}")
    with open(self.save_path + '/' + save_name, 'w', encoding='utf-8') as fp:
        json.dump(geojson_obj, fp, ensure_ascii=False, indent=4)
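
# Hedged sketch (toy data, hypothetical helper name) of the grouping that
# _visualize_dyna performs for GPS trajectories: every (entity_id, traj_id)
# pair becomes one LineString whose coordinates are read straight from the
# 'coordinates' column.
def _demo_traj_grouping():
    import pandas as pd
    dyna_toy = pd.DataFrame({
        'entity_id': [1, 1, 1, 1],
        'traj_id': [0, 0, 1, 1],
        'coordinates': ['[116.30, 39.90]', '[116.31, 39.91]',
                        '[116.40, 39.95]', '[116.41, 39.96]'],
    })
    for entity_id, entity_value in dyna_toy.groupby('entity_id'):
        for traj_id, traj_value in entity_value.groupby('traj_id'):
            coords = [eval(c) for c in traj_value['coordinates']]
            # entity 1 yields two LineString features, one per traj_id
            print(entity_id, traj_id, coords)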