Code example #1
File: e_time_to_cross.py  Project: numpde/transport
def speed_on_the_ground(graph: nx.DiGraph, filename: str):
    mpl.use("Agg")

    with commons.Section("Getting the background OSM map"):
        nodes = pd.DataFrame(data=nx.get_node_attributes(graph, name="pos"),
                             index=["lon", "lat"]).T
        extent = maps.ax4(nodes.lat, nodes.lon)
        osmap = maps.get_map_by_bbox(maps.ax2mb(*extent))

    with mpl.rc_context(PARAM['mpl_style']), commons.Axes() as ax1:
        velocity = pd.Series(nx.get_edge_attributes(
            graph, name="len")) / pd.Series(
                nx.get_edge_attributes(graph, name=PARAM['edge_time_attr']))
        cmap = LinearSegmentedColormap.from_list(
            name="noname", colors=["brown", "r", "orange", "g"])

        ax1.imshow(osmap, extent=extent, interpolation='quadric', zorder=-100)

        nx.draw(graph,
                ax=ax1,
                pos=nx.get_node_attributes(graph, name="pos"),
                edgelist=list(velocity.index),
                edge_color=list(velocity),
                edge_cmap=cmap,
                edge_vmin=0,
                edge_vmax=7,
                with_labels=False,
                arrows=False,
                node_size=0,
                alpha=0.8,
                width=0.3)

        fn = PARAM['out_images_path'] / commons.myname() / F"{filename}.png"
        ax1.figure.savefig(commons.makedirs(fn))
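The velocity plotted above is simply the edge "len" attribute divided by a travel-time attribute, aligned per edge through pandas Series. A minimal standalone sketch of that pattern, using a hypothetical "lag" attribute in place of PARAM['edge_time_attr']:

import networkx as nx
import pandas as pd

g = nx.DiGraph()
g.add_edge("a", "b", len=120.0, lag=20.0)   # 120 m traversed in 20 s
g.add_edge("b", "c", len=300.0, lag=100.0)  # 300 m traversed in 100 s

length = pd.Series(nx.get_edge_attributes(g, name="len"))
lag = pd.Series(nx.get_edge_attributes(g, name="lag"))

velocity = length / lag  # m/s per edge, indexed by (u, v): 6.0 and 3.0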
Code example #2
        def manhattan_metric_callback(info: SimpleNamespace):
            assert (info.round is not None)
            assert (info.trips is not None)
            assert (info.edges_met is not None)

            aboutfile_write({**aboutfile_read(), 'valid': False})

            with open(output_edges_fn, 'wb') as fd:
                pickle.dump(info.edges_met, fd)

            aboutfile_write({
                **aboutfile_read(),
                'valid': True,
                'days': list(map(str, days)),
                'weekday': weekday,
                'hour': hour,
                '#trips': len(info.trips),
                'round': info.round,
                'timestamp': datetime.now(tz=timezone.utc).isoformat(),
            })

            fn = makedirs(os.path.join(output_path, F"UV/{info.round:04}.jpg"))
            if os.path.isfile(fn):
                os.remove(fn)
            with nx_draw_met_by_len(info.graph, info.edges_met) as (fig, ax1):
                fig.savefig(fn, **{**PARAM['savefig_args'], 'dpi': 180})
Code example #3
def compare_multiple_trajectories(table_name):
	mpl.use("Agg")

	# Number of trips to plot
	N = 10
	# Number of trajectories per trip
	M = 12

	graph = get_road_graph()
	nodes = pd.DataFrame(data=nx.get_node_attributes(graph, "loc"), index=["lat", "lon"]).T
	edges_len = nx.get_edge_attributes(graph, name="len")

	where = "('2016-05-02 08:00' <= pickup_datetime) and (pickup_datetime <= '2016-05-02 09:00')"
	trips = get_trip_data(table_name, graph, order="", where=where)

	trips = trips.sample(min(N, len(trips)), random_state=1)
	logger.debug(F"{len(trips)} trips")

	with Section("Getting the background OSM map", out=logger.debug):
		extent = maps.ax4(nodes.lat, nodes.lon)
		osmap = maps.get_map_by_bbox(maps.ax2mb(*extent))

	with plt.style.context({**PARAM['mpl_style'], 'font.size': 5}), Axes() as ax1:
		# The background map
		ax1.imshow(osmap, extent=extent, interpolation='quadric', zorder=-100)

		ax1.axis("off")

		ax1.set_xlim(extent[0:2])
		ax1.set_ylim(extent[2:4])

		for (__, trip) in trips.iterrows():
			with Section("Computing candidate trajectories", out=logger.debug):
				trajectories = pd.DataFrame(data={'path': [
					path
					for (__, path) in
					zip(range(M), nx.shortest_simple_paths(graph, source=trip.u, target=trip.v))
				]})
				trajectories['dist'] = [sum(edges_len[e] for e in pairwise(path)) for path in trajectories.path]
				trajectories = trajectories.sort_values(by='dist', ascending=False)

			marker = dict(markersize=2, markeredgewidth=0.2, markerfacecolor="None")
			ax1.plot(trip['pickup_longitude'], trip['pickup_latitude'], 'og', **marker)
			ax1.plot(trip['dropoff_longitude'], trip['dropoff_latitude'], 'xr', **marker)

			cmap = LinearSegmentedColormap.from_list(name="noname", colors=["g", "orange", "r", "brown"])
			colors = cmap(pd.Series(trajectories['dist'] / trip['distance']).rank(pct=True))

			for (c, path) in zip(colors, trajectories.path):
				(y, x) = nodes.loc[list(path)].values.T
				ax1.plot(x, y, c=c, alpha=0.5, lw=0.3)

			# Save to file
			fn = os.path.join(PARAM['out_images_path'], F"{myname()}/{table_name}.png")
			ax1.figure.savefig(makedirs(fn))
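The candidate trajectories above come from nx.shortest_simple_paths, which yields simple paths in order of increasing length; zip(range(M), ...) just keeps the first M of them. A small sketch of the same idea on a toy graph, with itertools.islice in place of the zip trick:

from itertools import islice

import networkx as nx

g = nx.grid_2d_graph(4, 4)  # toy undirected graph with many alternative routes

M = 3
paths = list(islice(nx.shortest_simple_paths(g, source=(0, 0), target=(3, 3)), M))
for path in paths:
    print(len(path) - 1, path)  # number of hops, then the node sequence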
Code example #4
def run_experiments() -> pd.DataFrame:
	aliquot = datetime.utcnow().strftime("%Y%m%d-%H%M%S")

	setups = pd.DataFrame(
		# data=list(product([32, 64], [0.1, 0.2, 0.4, 0.8], [(2 ** n) for n in range(7, 13)])),
		data=list(product([32, 64], [0.1, 0.2, 0.4, 0.8], [(2 ** n) for n in range(7, 10)])),
		# data=list(product([64], [0.5], [2 ** 13])),
		# data=list(product([(2 ** n) for n in range(2, 6)], [0.1, 0.2, 0.4], [100, 1000, 10000])),
		# data=list(product([4, 8, 16], [0.1], [10, 100])),
		columns=["graph_size", "noise", "ntrips"],
	)

	logger.debug('\n'.join(map(str, ["Experiments:", setups])))

	for setup in setups.itertuples(index=False):

		# Preserve datatypes
		setup = dict(setup._asdict())
		# Alternative:
		# setup = setup.astype({'graph_size': int, 'noise': float, 'ntrips': int})

		with Section(F"Experiment {setup} is on", out=logger.info):
			# https://www.dataquest.io/blog/settingwithcopywarning/
			with pd.option_context('mode.chained_assignment', None):
				history = experiment(**setup, num_rounds=64)

		with open(makedirs(PARAM['out_experiment_results'].format(aliquot=aliquot, ext="pkl")), 'ab') as fd:
			pickle.dump({**setup, 'history': history}, fd)

		# results.to_json(makedirs(PARAM['out_experiment_results'].format(aliquot=aliquot, ext="json")))

		with open(makedirs(PARAM['out_experiment_results'].format(aliquot=aliquot, ext="json")), 'w') as fd:
			json.dump(
				{
					'setups': setups.to_json(),
					'script': this_module_body(),
					'timestamp': datetime.now(tz=timezone.utc).isoformat(),
				},
				fd
			)

	return setups
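The setups table above is the cartesian product of the parameter lists, one row per experiment; itertuples(index=False) plus _asdict() then turns each row back into keyword arguments. A reduced, runnable sketch with illustrative values:

from itertools import product

import pandas as pd

setups = pd.DataFrame(
    data=list(product([32, 64], [0.1, 0.2], [2 ** n for n in range(7, 10)])),
    columns=["graph_size", "noise", "ntrips"],
)

# itertuples(index=False) + _asdict() recovers each row as a plain dict of kwargs
for setup in setups.itertuples(index=False):
    kwargs = dict(setup._asdict())  # e.g. {'graph_size': 32, 'noise': 0.1, 'ntrips': 128}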
Code example #5
def trip_distance_vs_shortest(table_name):
	mpl.use("Agg")

	graph = get_road_graph()
	trips = get_trip_data(table_name, graph)

	with Section("Computing shortest distances", out=logger.debug):
		trips = trips.join(
			pd.DataFrame(
				data=parallel_map(GraphPathDist(graph, edge_weight="len"), zip(trips.u, trips.v)),
				columns=['path', 'shortest'], index=trips.index,
			)
		)

	# On-graph distance vs reported distance [meters]
	df: pd.DataFrame
	df = pd.DataFrame(data=dict(
		reported=(trips['distance']),
		shortest=(trips['shortest']),
	))
	# Convert to [km] and stay below 10km
	df = df.applymap(lambda x: (x / 1e3))
	df = df.applymap(lambda km: (km if (km < 10) else np.nan)).dropna()

	# Hour of the day
	df['h'] = trips['pickup_datetime'].dt.hour

	with plt.style.context(PARAM['mpl_style']):
		with Axes() as ax1:
			ax1.set_aspect(aspect="equal", adjustable="box")
			ax1.grid()
			ax1.plot(*(2 * [[0, df[['reported', 'shortest']].values.max()]]), c='k', ls='--', lw=0.5, zorder=100)
			for (h, hdf) in df.groupby(df['h']):
				c = plt.get_cmap("twilight_shifted")([h / 24])
				ax1.scatter(
					hdf['reported'], hdf['shortest'],
					c=c, s=3, alpha=0.8, lw=0, zorder=10,
					label=(F"{len(hdf)} trips at {h}h")
				)
			ax1.set_xlabel("Reported distance, km")
			ax1.set_ylabel("Naive graph distance, km")
			ax1.set_xticks(range(11))
			ax1.set_yticks(range(11))
			ax1.legend()

			# Save to file
			fn = os.path.join(PARAM['out_images_path'], F"{myname()}/{table_name}.png")
			ax1.figure.savefig(makedirs(fn))

			# Meta info
			json.dump({'number_of_datapoints': len(df)}, open((fn + ".txt"), 'w'))
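The scatter plot above colors each hour-of-day group with the cyclic "twilight_shifted" colormap, scaled by hour / 24. A self-contained sketch of that coloring on synthetic data (the data and the output file name are illustrative):

import matplotlib as mpl
mpl.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

rng = np.random.default_rng(1)
df = pd.DataFrame({'reported': rng.uniform(0, 10, 200), 'h': rng.integers(0, 24, 200)})
df['shortest'] = df['reported'] * rng.uniform(0.7, 1.0, 200)

(fig, ax1) = plt.subplots()
for (h, hdf) in df.groupby('h'):
    c = plt.get_cmap("twilight_shifted")([h / 24])  # one RGBA row per hour group
    ax1.scatter(hdf['reported'], hdf['shortest'], c=c, s=3, lw=0, label=F"{len(hdf)} trips at {h}h")
ax1.legend()
fig.savefig("hours.png", dpi=150)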
Code example #6
def trip_trajectories_ingraph(table_name):
	mpl.use("Agg")

	# Max number of trajectories to plot
	N = 1000

	graph = get_road_graph()
	nodes = pd.DataFrame(data=nx.get_node_attributes(graph, "loc"), index=["lat", "lon"]).T

	trips = get_trip_data(table_name, graph)

	trips = trips.sample(min(N, len(trips)))
	logger.debug(F"{len(trips)} trips")

	logger.debug("Computing trajectories")
	trajectories = parallel_map(GraphPathDist(graph).path_only, zip(trips.u, trips.v))

	with Section("Getting the background OSM map", out=logger.debug):
		extent = maps.ax4(nodes.lat, nodes.lon)
		osmap = maps.get_map_by_bbox(maps.ax2mb(*extent))

	with plt.style.context({**PARAM['mpl_style'], 'font.size': 5}):
		with Axes() as ax1:
			# The background map
			ax1.imshow(osmap, extent=extent, interpolation='quadric', zorder=-100)

			ax1.axis("off")

			ax1.set_xlim(extent[0:2])
			ax1.set_ylim(extent[2:4])

			c = 'b'
			if ("green" in table_name): c = "green"
			if ("yello" in table_name): c = "orange"

			logger.debug("Plotting trajectories")
			for traj in trajectories:
				(y, x) = nodes.loc[list(traj)].values.T
				ax1.plot(x, y, c=c, alpha=0.1, lw=0.3)

			# Save to file
			fn = os.path.join(PARAM['out_images_path'], F"{myname()}/{table_name}.png")
			ax1.figure.savefig(makedirs(fn))

			# Meta info
			json.dump({'number_of_trajectories': len(trips)}, open((fn + ".txt"), 'w'))
Code example #7
def trip_trajectories_velocity(table_name):
	mpl.use("Agg")

	# Max number of trajectories to use
	N = 10000

	graph = get_road_graph()
	nodes = pd.DataFrame(data=nx.get_node_attributes(graph, "loc"), index=["lat", "lon"]).T

	edge_name = pd.Series(nx.get_edge_attributes(graph, name="name"))

	where = "('2016-05-02 08:00' <= pickup_datetime) and (pickup_datetime <= '2016-05-02 09:00')"
	trips = get_trip_data(table_name, graph, order="", limit=N, where=where)

	trips['velocity'] = trips['distance'] / trips['duration/s']
	trips = trips.sort_values(by='velocity', ascending=True)

	logger.debug(F"{len(trips)} trips")

	with Section("Computing estimated trajectories", out=logger.debug):
		trips['traj'] = parallel_map(GraphPathDist(graph).path_only, zip(trips.u, trips.v))

	with Section("Getting the background OSM map", out=logger.debug):
		extent = maps.ax4(nodes.lat, nodes.lon)
		osmap = maps.get_map_by_bbox(maps.ax2mb(*extent))

	with Section("Computing edge velocities", out=logger.debug):
		edge_vel = defaultdict(list)
		for (traj, v) in zip(trips.traj, trips.velocity):
			for e in pairwise(traj):
				edge_vel[e].append(v)
		edge_vel = pd.Series({e: np.mean(v or np.nan) for (e, v) in edge_vel.items()}, index=graph.edges)
		edge_vel = edge_vel.dropna()

	with plt.style.context({**PARAM['mpl_style'], 'font.size': 5}), Axes() as ax1:
		# The background map
		ax1.imshow(osmap, extent=extent, interpolation='quadric', zorder=-100)

		ax1.axis("off")

		ax1.set_xlim(extent[0:2])
		ax1.set_ylim(extent[2:4])

		cmap_velocity = LinearSegmentedColormap.from_list(name="noname", colors=["brown", "r", "orange", "g"])

		# marker = dict(markersize=0.5, markeredgewidth=0.1, markerfacecolor="None")
		# ax1.plot(trips['pickup_longitude'], trips['pickup_latitude'], 'og', **marker)
		# ax1.plot(trips['dropoff_longitude'], trips['dropoff_latitude'], 'xr', **marker)

		# for e in edge_name[edge_name == "65th Street Transverse"].index:
		# 	print(e, edge_vel[e])

		edge_vel: pd.Series
		# edge_vel = edge_vel.rank(pct=True)
		edge_vel = edge_vel.clip(lower=2, upper=6).round()
		edge_vel = (edge_vel - edge_vel.min()) / (edge_vel.max() - edge_vel.min())
		edge_vel = edge_vel.apply(cmap_velocity)

		nx.draw_networkx_edges(
			graph.edge_subgraph(edge_vel.index),
			ax=ax1,
			pos=nx.get_node_attributes(graph, name="pos"),
			edgelist=list(edge_vel.index),
			edge_color=list(edge_vel),
			# edge_cmap=cmap_velocity,
			# vmin=0, vmax=1,
			arrows=False, node_size=0, alpha=0.8, width=0.3,
		)

		# Save to file
		fn = os.path.join(PARAM['out_images_path'], F"{myname()}/{table_name}.png")
		ax1.figure.savefig(makedirs(fn))

		# Meta info
		json.dump({'number_of_trajectories': len(trips)}, open((fn + ".txt"), 'w'))
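Edge velocities above are aggregated by letting every trip contribute its average speed to each edge of its estimated trajectory, then averaging per edge. A minimal sketch of that aggregation (standard-library pairwise assumed, Python 3.10+; the project uses more_itertools.pairwise):

from collections import defaultdict
from itertools import pairwise

import numpy as np
import pandas as pd

trajectories = [["a", "b", "c"], ["b", "c", "d"]]  # node sequences of two trips
velocities = [5.0, 7.0]                            # one average speed per trip (m/s)

edge_vel = defaultdict(list)
for (traj, v) in zip(trajectories, velocities):
    for e in pairwise(traj):
        edge_vel[e].append(v)

edge_vel = pd.Series({e: np.mean(v) for (e, v) in edge_vel.items()})
# edge ('b', 'c') lies on both trips, so it averages to 6.0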
Code example #8
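# Note: "logger" here is the logging module itself ("import logging as logger", as in code example #11).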
logger.basicConfig(
	level=logger.DEBUG,
	format="%(levelname)-8s [%(asctime)s] : %(message)s",
	datefmt="%Y%m%d %H:%M:%S %Z",
)
logger.getLogger('matplotlib').setLevel(logger.WARNING)
logger.getLogger('PIL').setLevel(logger.WARNING)

# ~~~~ SETTINGS ~~~~ #

PARAM = {
	'taxidata': "data/taxidata/sqlite/UV/db.db",
	'road_graph': "data/road_graph/UV/nx_digraph_naive.pkl",

	'out_images_path': makedirs("exploration/"),

	'mpl_style': {
		'font.size': 3,
		'xtick.major.size': 2,
		'ytick.major.size': 0,
		'xtick.major.pad': 1,
		'ytick.major.pad': 1,

		'savefig.bbox': "tight",
		'savefig.pad_inches': 0,
		'savefig.dpi': 300,
	},
}

Code example #9
IFILE = {
    #'mapmatched' : "OUTPUT/14/mapmatched/{scenario}/{routeid}-{direction}/UV/{mapmatch_uuid}.{ext}",
    'segment_by_route': "OUTPUT/13/{scenario}/byroute/UV/{routeid}-{dir}.json",
    'MOTC_routes': "OUTPUT/00/ORIGINAL_MOTC/{City}/CityBusApi_StopOfRoute.json",
}

## =================== OUTPUT :

OFILE = {
    'timetable_json': "OUTPUT/17/timetable/{scenario}/json/{routeid}-{dir}.json",
}

commons.makedirs(OFILE)

## ==================== PARAM :

PARAM = {
    #'mapbox_api_token' : open(".credentials/UV/mapbox-token.txt", 'r').read(),

    # When is the bus run too short at the tails?
    'tail_eta_patch_dist': 50,  # meters
    # Have at least this many waypoints close to the run
    'min_near_run': 3,
    'n_parallel_jobs': min(12, ceil(cpu_count() / 1.5)),
}

## ====================== AUX :
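The IFILE/OFILE entries above are path templates with {placeholder} fields that are filled per scenario/route via str.format. A tiny sketch of that convention (the filled-in values are illustrative):

OFILE = {
    'timetable_json': "OUTPUT/17/timetable/{scenario}/json/{routeid}-{dir}.json",
}

fn = OFILE['timetable_json'].format(scenario="demo", routeid="KHH100", dir=0)
# -> "OUTPUT/17/timetable/demo/json/KHH100-0.json"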
Code example #10
def plot_results():
	mpl.use("Agg")

	def summary(history: pd.DataFrame) -> pd.DataFrame:
		history = history.div(history['secret'], axis=0).drop(columns=['secret', 0])
		history = (100 * history.transform(np.log10).mean(axis=0)).transform(np.abs)
		return history

	cat = pd.DataFrame(glob(PARAM['out_experiment_results'].format(aliquot="*", ext="*")), columns=["file"])
	cat = cat.assign(dir=list(map(os.path.dirname, cat.file)))
	cat = cat.assign(sig=list(map(os.path.basename, cat.dir)))
	cat = cat.assign(ext=list(map(last, map(os.path.splitext, cat.file))))
	cat = cat.pivot(index='dir', columns='ext', values='file')

	for (folder, meta) in cat.iterrows():
		logger.info(F"Folder: {os.path.relpath(folder, os.path.dirname(__file__))}")

		# Read back all pickled records appended to the results file
		records = []
		with open(meta['.pkl'], 'rb') as fd:
			with suppress(EOFError):
				while True:
					records.append(pickle.load(fd))
		df = pd.DataFrame(records)

		df = df[df.ntrips > 10]
		df = df[~df.history.isna()]

		df = df.astype({'noise': float, 'graph_size': int, 'ntrips': int})

		for (graph_size, df1) in df.groupby(df.graph_size):
			for (noise, df2) in df1.groupby(df1.noise):

				image_file = makedirs(os.path.join(folder, "images", F"graph_size={graph_size}", F"noise={noise}".replace(".", "p"), "round.{ext}"))

				fig: plt.Figure
				ax1: plt.Axes
				(fig, ax1) = plt.subplots()
				for (ntrips, history) in zip(df2['ntrips'], map(summary, df2['history'])):
					ax1.plot(history, marker='.', ls='--', label=ntrips)
				ax1.set_xscale("log")
				ax1.set_yscale("log")
				ax1.set_ylabel("Geometric average relative error, %")
				ax1.set_ylim(1e-1, 1e1)
				ax1.grid()
				ax1.set_title(F"Graph size: {graph_size}, noise: {noise}")
				ax1.legend()

				fig.savefig(image_file.format(ext="png"), **PARAM['savefig_args'])

				plt.close(fig)

		for (graph_size, df1) in df.groupby(df.graph_size):
			image_file = makedirs(os.path.join(folder, "images", F"graph_size={graph_size}", "ntrips.{ext}"))

			fig: plt.Figure
			ax1: plt.Axes
			(fig, ax1) = plt.subplots()

			for (noise, df2) in df1.groupby(df1.noise):
				history = pd.Series(index=df2['ntrips'], data=list(map(min, map(summary, df2['history']))))
				ax1.plot(history, marker='.', ls='--', label=noise)

			ax1.set_xlabel("Number of trips")
			ax1.set_ylabel("Geometric average of relative error, %")
			ax1.set_xscale("log")
			ax1.set_yscale("log")
			ax1.set_ylim(1e-1, 1e1)
			ax1.grid()
			ax1.set_title(F"Graph size: {graph_size}")
			ax1.legend()

			fig.savefig(image_file.format(ext="png"), **PARAM['savefig_args'])

			plt.close(fig)
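run_experiments (code example #4) appends one pickled record per experiment to a single results file ('ab' mode), and plot_results reads them back one by one until EOFError. A self-contained sketch of both sides of that convention (the file path is illustrative):

import pickle
from contextlib import suppress

import pandas as pd

results_fn = "results.pkl"  # illustrative path

# Writer side: append one pickled record per experiment to the same file
with open(results_fn, 'ab') as fd:
    for record in [{'ntrips': 100, 'noise': 0.1}, {'ntrips': 200, 'noise': 0.2}]:
        pickle.dump(record, fd)

# Reader side: load records until the file is exhausted (EOFError)
records = []
with open(results_fn, 'rb') as fd:
    with suppress(EOFError):
        while True:
            records.append(pickle.load(fd))

df = pd.DataFrame(records)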
Code example #11
from types import SimpleNamespace
from collections import Counter
from itertools import product
from more_itertools import pairwise, first, last

from contextlib import suppress

from progressbar import progressbar

import seaborn as sb
import matplotlib as mpl
import matplotlib.pyplot as plt

from percache import Cache
cache = Cache(makedirs(os.path.join(os.path.dirname(__file__), "synthetic/UV/percache_runs")), livesync=True)

# ~~~~ LOGGING ~~~~ #

import logging as logger
logger.basicConfig(level=logger.DEBUG, format="%(levelname)-8s [%(asctime)s] @%(funcName)s : %(message)s", datefmt="%Y%m%d %H:%M:%S %Z")
logger.getLogger('matplotlib').setLevel(logger.WARNING)
logger.getLogger('PIL').setLevel(logger.WARNING)


# ~~~~ SETTINGS ~~~~ #

PARAM = {
	'out_experiment_results': os.path.join(os.path.dirname(__file__), "synthetic", "{aliquot}", "results.{ext}"),
	'savefig_args': dict(bbox_inches='tight', pad_inches=0, jpeg_quality=0.9, dpi=300),
}
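The percache Cache created above is typically used as a decorator that memoizes function results to the on-disk cache file between runs. The sketch below assumes that documented decorator usage; the cache file name and the function are illustrative:

from percache import Cache

cache = Cache("percache_demo", livesync=True)  # illustrative cache file

@cache
def slow_square(x):
    print("computing", x)
    return x * x

print(slow_square(4))  # computed on the first run, read from the on-disk cache afterwards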
Code example #12
File: 14_mapmatch.py  Project: numpde/transport
def mapmatch_runs(scenario, runs):

    # Road network (main graph component) with nearest-neighbor tree for the nodes
    g: nx.DiGraph

    (g, knn) = commons.inspect(['g', 'knn'])(pickle.load(
        open(IFILE['OSM_graph_file'], 'rb'))['main_component_with_knn'])

    g = trim_graph_to_busable(g)

    # Nearest edges computer
    kne = (lambda q: graph.estimate_kne(g, knn, q, ke=20))

    mpl.use('Agg')
    mpl.rcParams['figure.max_open_warning'] = 100

    import matplotlib.pyplot as plt

    #
    def make_figure(result) -> dict:
        ax: plt.Axes

        if ('plt' in result):
            (fig, ax) = commons.inspect({'plt': ('fig', 'ax')})(result)
        else:
            (fig, ax) = plt.subplots()
            result['plt'] = {'fig': fig, 'ax': ax}
            result['auto_close_fig'] = commons.UponDel(lambda: plt.close(fig))

        ax.cla()

        ax.set_title("{} ({}%)".format(
            result['status'], math.floor(100 * result.get('progress', 0))))

        if ('waypoints_all' in result):
            (y, x) = zip(*result['waypoints_all'])
            ax.plot(x, y, 'o', c='m', markersize=2)

        if ('geo_path' in result):
            (y, x) = zip(*result['geo_path'])
            ax.plot(x, y, 'b--', linewidth=2, zorder=100)

        return result['plt']

    #
    def mm_callback(result) -> None:

        if (result['status'] == "opti"):
            if (dt.datetime.now() < result.get('nfu', dt.datetime.min)):
                return

        # Log into a GPX file
        if ('waypoints_all' in result):
            with open(OFILE['progress'].format(ext="gpx"), 'w') as fd:
                fd.write(
                    graph.simple_gpx(result['waypoints_all'],
                                     [result.get('geo_path', [])]).to_xml())

        # Save figure (make_figure returns the {'fig': ..., 'ax': ...} dict)
        fig = make_figure(result)['fig']
        with open(OFILE['progress'].format(ext="png"), 'wb') as fd:
            fig.savefig(fd, bbox_inches='tight', pad_inches=0)

        # Next figure update
        result['nfu'] = dt.datetime.now() + dt.timedelta(seconds=5)

    # Collect all bus runs
    runs_by_runid = {run[KEYS.runid]: run for run in runs}

    # Collect all waypoints
    waypoints_by_runid = {
        runid: list(map(tuple, run[KEYS.pos]))
        for (runid, run) in runs_by_runid.items()
    }

    #commons.logger.debug(json.dumps(runs, indent=2))

    commons.logger.info("Running mapmatch on {} runs".format(len(runs)))

    # MAPMATCH RUNS
    results = graph.mapmatch(waypoints_by_runid,
                             g,
                             kne,
                             knn=knn,
                             callback=None,
                             stubborn=0.2,
                             many_partial=True)

    for result in results:

        commons.logger.info("Got mapmatch with waypoints {}".format(
            result['waypoints_used']))

        # The run on which mapmatch operated
        run = runs_by_runid[result['waypoint_setid']]

        # Collect initial info about the mapmatch attempt
        mapmatch_attempt = {
            k: run[k]
            for k in [KEYS.routeid, KEYS.dir, KEYS.runid, KEYS.busid]
        }

        # Attach a unique identifier for this mapmatch
        mapmatch_attempt['MapMatchUUID'] = uuid.uuid4().hex

        # Filename without the extension
        fn = OFILE['mapmatched'].format(
            scenario=scenario,
            routeid=mapmatch_attempt[KEYS.routeid],
            direction=mapmatch_attempt[KEYS.dir],
            mapmatch_uuid=mapmatch_attempt['MapMatchUUID'],
            ext="{ext}")

        # Copy relevant fields from the mapmatcher result
        for k in ['waypoints_used', 'path', 'geo_path', 'mapmatcher_version']:
            mapmatch_attempt[k] = result[k]

        # Save the result in different formats, in this directory
        commons.makedirs(fn.format(ext='~~~'))

        #  o) Image

        try:
            # Make and save the figure
            fig = make_figure(result)['fig']
            with open(fn.format(ext="png"), 'wb') as fd:
                fig.savefig(fd, bbox_inches='tight', pad_inches=0)
        except Exception as e:
            commons.logger.warning("Could not save figure {} ({})".format(
                fn.format(ext="png"), e))

        #  o) JSON

        try:
            with open(fn.format(ext="json"), 'w') as fd:
                json.dump(mapmatch_attempt, fd)
        except Exception as e:
            commons.logger.warning(
                "Failed to write mapmatch file {} ({})".format(
                    fn.format(ext="json"), e))

        #  o) GPX

        try:
            with open(fn.format(ext="gpx"), 'w') as fd:
                fd.write(
                    graph.simple_gpx(mapmatch_attempt['waypoints_used'],
                                     [mapmatch_attempt['geo_path']]).to_xml())
        except Exception as e:
            commons.logger.warning("Failed to write GPX file {} ({})".format(
                fn.format(ext="gpx"), e))

        time.sleep(1)
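mm_callback above throttles its own work: while the mapmatcher reports "opti" progress, it returns early until a stored 'nfu' ("next figure update") timestamp has passed, then reschedules it. A reduced sketch of just that throttling pattern:

import datetime as dt

def throttled_callback(result: dict) -> None:
    # During the "opti" phase, skip updates until the stored 'nfu' timestamp has passed
    if result.get('status') == "opti":
        if dt.datetime.now() < result.get('nfu', dt.datetime.min):
            return
    # ... expensive reporting here (write the GPX trace, redraw and save the figure) ...
    # Schedule the next figure update
    result['nfu'] = dt.datetime.now() + dt.timedelta(seconds=5)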
Code example #13
def compute_metric_for_table(table_name):
    # Get the database table timespan
    with sqlite3.connect(PARAM['taxidata']) as con:
        sql = F"SELECT min(pickup_datetime), max(dropoff_datetime) FROM [{table_name}]"
        dates = pd.date_range(*pd.read_sql_query(sql=sql, con=con).iloc[0])

    #
    # for ((weekday, days), hour) in product(dates.groupby(dates.weekday).items(), [8]):

    for ((weekday, days),
         hour) in product(dates.groupby(dates.weekday).items(), range(0, 24)):
        logger.debug(F"weekday/hour = {weekday}/{hour} over {len(days)} days")

        # Filenames for output
        output_path = makedirs(
            os.path.join(PARAM['out_metric'],
                         F"{table_name}/{weekday}/{hour:02}/"))
        output_edges_fn = os.path.join(output_path, "edges_met.pkl")
        output_about_fn = os.path.join(output_path, "edges_met.json")

        def aboutfile_read():
            try:
                with open(output_about_fn, 'r') as fd:
                    return json.load(fd)
            except FileNotFoundError:
                return {'locked': False}

        def aboutfile_write(info):
            with open(output_about_fn, 'w') as fd:
                json.dump(info, fd)

        def manhattan_metric_callback(info: SimpleNamespace):
            assert (info.round is not None)
            assert (info.trips is not None)
            assert (info.edges_met is not None)

            aboutfile_write({**aboutfile_read(), 'valid': False})

            with open(output_edges_fn, 'wb') as fd:
                pickle.dump(info.edges_met, fd)

            aboutfile_write({
                **aboutfile_read(),
                'valid': True,
                'days': list(map(str, days)),
                'weekday': weekday,
                'hour': hour,
                '#trips': len(info.trips),
                'round': info.round,
                'timestamp': datetime.now(tz=timezone.utc).isoformat(),
            })

            fn = makedirs(os.path.join(output_path, F"UV/{info.round:04}.jpg"))
            if os.path.isfile(fn):
                os.remove(fn)
            with nx_draw_met_by_len(info.graph, info.edges_met) as (fig, ax1):
                fig.savefig(fn, **{**PARAM['savefig_args'], 'dpi': 180})

        # Defaults
        skip_rounds = 0
        edges_met = None

        # Job status
        about = aboutfile_read()

        if about.get('locked', True):
            # Somebody may be working on this now
            logger.info(F"Locked(?) {output_about_fn} exists -- skipping")
            continue

        if (about.get('valid') == False):
            logger.warning(
                F"Job {output_about_fn} explicitly flagged as invalid -- skipping"
            )
            continue

        if about.get('valid'):
            logger.info(F"Trying to resume {output_about_fn}")
            skip_rounds = about['round']
            with open(output_edges_fn, 'rb') as fd:
                edges_met = pickle.load(fd)

        try:
            # Quickly, reserve the job
            logger.debug("Marking job as locked")
            aboutfile_write({**about, 'locked': True, 'exception': None})

            # Load and sanitize the road graph
            graph = get_road_graph()

            # Taxidata query details
            where = " OR ".join(
                "(('{a}' <= pickup_datetime) AND (dropoff_datetime < '{b}'))".
                format(
                    a=(day + pd.Timedelta(hour + 0, unit='h')),
                    b=(day + pd.Timedelta(hour + 1, unit='h')),
                ) for day in days)
            # Query the taxidata database
            trips = get_taxidata_trips(table_name, where=where)

            # Attach nearest nodes
            trips = trips.join(project(trips, graph), how="inner")

            #
            opt = options_refine_effective_metric()
            opt.min_trip_distance_m = 200
            opt.max_trip_distance_m = 7e3
            opt.num_rounds = 100

            # Run the metric computation loop
            # Note: The callback function records the results
            refine_effective_metric(graph,
                                    trips,
                                    opt=opt,
                                    callback=manhattan_metric_callback,
                                    skip_rounds=skip_rounds,
                                    edges_met=edges_met)

        except:
            (exc_type, value, traceback) = sys.exc_info()
            aboutfile_write({
                **aboutfile_read(), 'exception': (exc_type.__name__)
            })
            raise
        finally:
            logger.debug("Marking job as non-locked")
            aboutfile_write({**aboutfile_read(), 'locked': False})
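compute_metric_for_table above keeps a small JSON "about" file next to each output with 'locked', 'valid' and 'round' flags, so interrupted jobs can be skipped, resumed, or retried. A reduced sketch of that bookkeeping (the path and the computation are illustrative):

import json

output_about_fn = "edges_met.json"  # illustrative path

def aboutfile_read():
    try:
        with open(output_about_fn, 'r') as fd:
            return json.load(fd)
    except FileNotFoundError:
        return {'locked': False}

def aboutfile_write(info):
    with open(output_about_fn, 'w') as fd:
        json.dump(info, fd)

about = aboutfile_read()
if not about.get('locked', True):
    try:
        aboutfile_write({**about, 'locked': True})   # reserve the job
        aboutfile_write({**aboutfile_read(), 'valid': False})
        # ... long computation, writing intermediate results ...
        aboutfile_write({**aboutfile_read(), 'valid': True, 'round': 1})
    finally:
        aboutfile_write({**aboutfile_read(), 'locked': False})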
Code example #14


# ~~~~ SETTINGS ~~~~ #

PARAM = {
    'taxidata': datapath("taxidata/sqlite/UV/db.db"),
    'road_graph': datapath("road_graph/UV/nx_digraph_naive.pkl"),

    # Tolerance for accepting a nearest node (distance in meters)
    'tol_nearest_nodes/m': 20,

    'out_metric': makedirs(os.path.join(os.path.dirname(__file__), "manhattan_metric/")),
    'savefig_args': dict(bbox_inches='tight', pad_inches=0, jpeg_quality=0.9, dpi=300),
}

# ~~~~ HELPERS ~~~~ #

# ~~~~ GRAPHICS ~~~~ #


@contextmanager
@retry(KeyboardInterrupt, tries=2, delay=1)
def nx_draw_met_by_len(graph, edges_met=None, mpl_backend="Agg", printer=None):
    mpl.use(mpl_backend)

    with Section("Preparing to draw graph", out=printer):