def test_vega_weighted_pointmap():
    """Verify every constructor argument is serialized into the built vega
    JSON, and that coordinate_system defaults to EPSG:4326 when omitted."""
    vega = vega_weighted_pointmap(
        1900, 1410, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#2DEF4A", [2, 5], [1, 10], 0.5, "EPSG:3857").build()
    vega_dict = json.loads(vega)
    enter = vega_dict["marks"][0]["encode"]["enter"]

    assert vega_dict["width"] == 1900
    assert vega_dict["height"] == 1410

    box = enter["bounding_box"]["value"]
    assert box[0] == -73.998427
    assert box[1] == 40.730309
    assert box[2] == -73.954348
    assert box[3] == 40.780816

    assert enter["color"]["value"] == "#2DEF4A"

    color_ruler = enter["color_ruler"]["value"]
    assert color_ruler[0] == 2
    assert color_ruler[1] == 5

    stroke_ruler = enter["stroke_ruler"]["value"]
    assert stroke_ruler[0] == 1
    assert stroke_ruler[1] == 10

    assert enter["opacity"]["value"] == 0.5
    assert enter["coordinate_system"]["value"] == "EPSG:3857"

    # coordinate_system omitted -> library default EPSG:4326
    vega = vega_weighted_pointmap(
        1900, 1410, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#2DEF4A", [2, 5], [1, 10], 0.5).build()
    enter = json.loads(vega)["marks"][0]["encode"]["enter"]
    assert enter["coordinate_system"]["value"] == "EPSG:4326"
def draw_weighted_point_map(spark):
    """Render four weighted point maps of NYC taxi pickups and save them
    as PNGs under /tmp.

    The four renderings cover every combination of fixed/weighted color and
    fixed/weighted stroke width. Timing for the whole routine is printed.

    :param spark: active SparkSession used to load the CSV and run the
                  ST_* SQL queries registered by register_funcs.

    Bug fix: the view is registered with createOrReplaceTempView, which
    creates a *session-local* temp view; dropGlobalTempView cannot remove
    it (it returns False silently), so dropTempView is used instead.
    """
    start_time = time.time()
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "VendorID string, tpep_pickup_datetime timestamp, tpep_dropoff_datetime timestamp, passenger_count long, trip_distance double, pickup_longitude double, pickup_latitude double, dropoff_longitude double, dropoff_latitude double, fare_amount double, tip_amount double, total_amount double, buildingid_pickup long, buildingid_dropoff long, buildingtext_pickup string, buildingtext_dropoff string"
    ).load("file:///tmp/0_5M_nyc_taxi_and_building.csv").cache()
    df.createOrReplaceTempView("nyc_taxi")
    register_funcs(spark)
    # single color and single stroke width
    res1 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), ST_GeomFromText('POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))'))"
    )
    vega1 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#87CEEB"], [0, 2], [5], 1.0, "EPSG:4326")
    res1 = weighted_pointmap(vega1, res1)
    save_png(res1, '/tmp/weighted_pointmap_0_0.png')
    # multiple color and single stroke width
    res2 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, tip_amount as c from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), ST_GeomFromText('POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))'))"
    )
    vega2 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#0000FF", "#FF0000"], [0, 2], [5], 1.0, "EPSG:4326")
    res2 = weighted_pointmap(vega2, res2)
    save_png(res2, '/tmp/weighted_pointmap_1_0.png')
    # single color and multiple stroke width
    res3 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, fare_amount as s from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), ST_GeomFromText('POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))'))"
    )
    vega3 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#87CEEB"], [0, 2], [0, 10], 1.0, "EPSG:4326")
    res3 = weighted_pointmap(vega3, res3)
    save_png(res3, '/tmp/weighted_pointmap_0_1.png')
    # multiple color and multiple stroke width
    res4 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, tip_amount as c, fare_amount as s from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), ST_GeomFromText('POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))'))"
    )
    vega4 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#0000FF", "#FF0000"], [0, 2], [0, 10], 1.0, "EPSG:4326")
    res4 = weighted_pointmap(vega4, res4)
    save_png(res4, '/tmp/weighted_pointmap_1_1.png')
    spark.sql("show tables").show()
    # createOrReplaceTempView registers a session-local view, so it must be
    # dropped with dropTempView; dropGlobalTempView would be a silent no-op.
    spark.catalog.dropTempView("nyc_taxi")
    print("--- %s seconds ---" % (time.time() - start_time))
def draw_weighted_point_map(spark):
    """Render four weighted point maps of NYC taxi pickups (CSV at
    ``data_path``) and save them as PNGs under /tmp.

    The four renderings cover every combination of single/gradient color
    and single/variable stroke width.

    :param spark: active SparkSession used to load the CSV and run the
                  ST_* SQL queries registered by register_funcs.

    Bug fix: the view is registered with createOrReplaceTempView, which
    creates a *session-local* temp view; dropGlobalTempView cannot remove
    it (it returns False silently), so dropTempView is used instead.
    """
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "VendorID string, tpep_pickup_datetime timestamp, tpep_dropoff_datetime timestamp, passenger_count long, trip_distance double, pickup_longitude double, pickup_latitude double, dropoff_longitude double, dropoff_latitude double, fare_amount double, tip_amount double, total_amount double, buildingid_pickup long, buildingid_dropoff long, buildingtext_pickup string, buildingtext_dropoff string"
    ).load(data_path).cache()
    df.show(20, False)
    df.createOrReplaceTempView("nyc_taxi")
    register_funcs(spark)
    # single color and single stroke width
    res1 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')"
    )
    vega1 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#87CEEB", [0, 2], [5], 1.0, "EPSG:4326")
    res1 = weighted_pointmap(res1, vega1)
    save_png(res1, '/tmp/weighted_pointmap_0_0.png')
    # multiple color and single stroke width
    res2 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, tip_amount as c from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')"
    )
    vega2 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        "blue_to_red", [0, 2], [5], 1.0, "EPSG:4326")
    res2 = weighted_pointmap(res2, vega2)
    save_png(res2, '/tmp/weighted_pointmap_1_0.png')
    # single color and multiple stroke width
    res3 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, fare_amount as s from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')"
    )
    vega3 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#87CEEB", [0, 2], [0, 10], 1.0, "EPSG:4326")
    res3 = weighted_pointmap(res3, vega3)
    save_png(res3, '/tmp/weighted_pointmap_0_1.png')
    # multiple color and multiple stroke width
    res4 = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point, tip_amount as c, fare_amount as s from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')"
    )
    vega4 = vega_weighted_pointmap(
        1024, 896, [-73.998427, 40.730309, -73.954348, 40.780816],
        "blue_to_red", [0, 2], [0, 10], 1.0, "EPSG:4326")
    res4 = weighted_pointmap(res4, vega4)
    save_png(res4, '/tmp/weighted_pointmap_1_1.png')
    spark.sql("show tables").show()
    # createOrReplaceTempView registers a session-local view, so it must be
    # dropped with dropTempView; dropGlobalTempView would be a silent no-op.
    spark.catalog.dropTempView("nyc_taxi")
def test_weighted_point_map():
    """Render the four fixed/weighted color x fixed/weighted size
    combinations of weighted_point_map and dump each PNG to /tmp."""
    arr_x = pandas.Series([10, 20, 30, 40, 50])
    arr_y = pandas.Series([10, 20, 30, 40, 50])
    points = arctern.ST_Point(arr_x, arr_y)
    arr_c = pandas.Series([1, 2, 3, 4, 5])
    arr_s = pandas.Series([2, 4, 6, 8, 10])

    # fixed color, fixed size
    vega1 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#87CEEB"], [1, 5], [5], 1.0, "EPSG:3857")
    res1 = arctern.weighted_point_map(vega1, points)
    save_png(res1, "/tmp/test_weighted_0_0.png")

    # weighted color, fixed size
    vega2 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#0000FF", "#FF0000"], [1, 5], [5], 1.0, "EPSG:3857")
    res2 = arctern.weighted_point_map(vega2, points, color_weights=arr_c)
    save_png(res2, "/tmp/test_weighted_1_0.png")

    # fixed color, weighted size
    vega3 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#87CEEB"], [1, 5], [1, 10], 1.0, "EPSG:3857")
    res3 = arctern.weighted_point_map(vega3, points, size_weights=arr_s)
    save_png(res3, "/tmp/test_weighted_0_1.png")

    # weighted color, weighted size
    vega4 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#0000FF", "#FF0000"], [1, 5], [1, 10], 1.0, "EPSG:3857")
    res4 = arctern.weighted_point_map(vega4, points,
                                      color_weights=arr_c,
                                      size_weights=arr_s)
    save_png(res4, "/tmp/test_weighted_1_1.png")
def draw_world_weighted_point_map(spark):
    """Render a world weighted point map of COVID-19 country locations and
    save it as a PNG in the current directory.

    :param spark: active SparkSession used to load the CSV at ``country_csv``
                  and run the ST_* SQL queries registered by register_funcs.

    Bug fix: the view is registered with createOrReplaceTempView, which
    creates a *session-local* temp view; dropGlobalTempView cannot remove
    it (it returns False silently), so dropTempView is used instead.
    """
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "continent string, country string, locationId string, longitude double, latitude double,"
        "currentConfirmedCount int, confirmedCount int, suspectedCount int, curedCount int, deadCount int, "
        "updateTime timestamp").load(country_csv).cache()
    df.createOrReplaceTempView("COVID_country")
    register_funcs(spark)
    # 1
    res1 = spark.sql(
        "select ST_Point(longitude, latitude) as point from COVID_country ")
    res1.createOrReplaceTempView("res1")
    # Filter out rows whose coordinates failed to parse.
    res1 = spark.sql("select * from res1 where point != 'POINT (nan nan)' ")
    res1.show(20, False)
    vega1 = vega_weighted_pointmap(
        3000, 2000, [-289.095983, -73.863121, 289.095983, 73.863121],
        "#EEEEEE", [2, 60], [6], 1.0, "EPSG:4326")
    res_png1 = weighted_pointmap(res1, vega1)
    save_png(res_png1, './COVID_country_weighted_point_map1.png')
    # createOrReplaceTempView registers a session-local view, so it must be
    # dropped with dropTempView; dropGlobalTempView would be a silent no-op.
    spark.catalog.dropTempView("COVID_country")
def draw_world_include_province_weighted_point_map(spark):
    """Render a weighted point map of per-province COVID-19 confirmed counts
    (2020-03-29 snapshot) and save it as a PNG in the current directory.

    :param spark: active SparkSession used to load the CSV at
                  ``country_with_province_csv`` and run the registered
                  ST_* SQL queries.

    Bug fix: the view is registered with createOrReplaceTempView, which
    creates a *session-local* temp view; dropGlobalTempView cannot remove
    it (it returns False silently), so dropTempView is used instead.
    """
    # 1
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "Province string, Country string, Longitude double, Latitude double, ConfirmedCount int,"
        "DeadCount int, CuredCount int, LastUpdateTime string").load(
            country_with_province_csv).cache()
    df.createOrReplaceTempView("COVID_country_province")
    register_funcs(spark)
    res2 = spark.sql(
        "select ST_Point(Longitude, Latitude) as point, ConfirmedCount as s from COVID_country_province "
        "where LastUpdateTime like '%03-29%'")
    res2.createOrReplaceTempView("res2")
    # Filter out rows whose coordinates failed to parse.
    res2 = spark.sql("select * from res2 where point != 'POINT (nan nan)' ")
    vega2 = vega_weighted_pointmap(
        3000, 2000, [-289.095983, -73.863121, 289.095983, 73.863121],
        "#F0356D", [2, 60], [6, 60], 1.0, "EPSG:4326")
    res_png2 = weighted_pointmap(res2, vega2)
    save_png(res_png2, './COVID_country_weighted_point_map2.png')
    # createOrReplaceTempView registers a session-local view, so it must be
    # dropped with dropTempView; dropGlobalTempView would be a silent no-op.
    spark.catalog.dropTempView("COVID_country_province")
def test_weighted_point_map():
    """Smoke-test weighted_point_map_layer across the four combinations of
    fixed/weighted color and fixed/weighted size, saving each PNG to /tmp."""
    arr_x = pandas.Series([-73.96524, -73.96118, -73.97324, -73.98456])
    arr_y = pandas.Series([40.73747, 40.74507, 40.75890, 40.77654])
    points = arctern.ST_Point(arr_x, arr_y)
    arr_c = pandas.Series([1, 2, 3, 4])
    arr_s = pandas.Series([4, 6, 8, 10])

    # fixed color, fixed size
    vega1 = vega_weighted_pointmap(
        1024, 896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        color_gradient=["#0000FF"], opacity=1.0,
        coordinate_system="EPSG:4326")
    res1 = arctern.weighted_point_map_layer(vega1, points)
    save_png(res1, "/tmp/test_weighted_0_0.png")

    # weighted color, fixed size
    vega2 = vega_weighted_pointmap(
        1024, 896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        color_gradient=["#0000FF", "#FF0000"], color_bound=[1, 5],
        opacity=1.0, coordinate_system="EPSG:4326")
    res2 = arctern.weighted_point_map_layer(vega2, points,
                                            color_weights=arr_c)
    save_png(res2, "/tmp/test_weighted_1_0.png")

    # fixed color, weighted size
    vega3 = vega_weighted_pointmap(
        1024, 896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        color_gradient=["#0000FF"], size_bound=[1, 10], opacity=1.0,
        coordinate_system="EPSG:4326")
    res3 = arctern.weighted_point_map_layer(vega3, points,
                                            size_weights=arr_s)
    save_png(res3, "/tmp/test_weighted_0_1.png")

    # weighted color, weighted size
    vega4 = vega_weighted_pointmap(
        1024, 896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        color_gradient=["#0000FF", "#FF0000"], color_bound=[1, 5],
        size_bound=[1, 10], opacity=1.0, coordinate_system="EPSG:4326")
    res4 = arctern.weighted_point_map_layer(vega4, points,
                                            color_weights=arr_c,
                                            size_weights=arr_s)
    save_png(res4, "/tmp/test_weighted_1_1.png")
def test_vega_weighted_pointmap():
    """Check full-argument serialization into the vega JSON, plus the
    defaults applied when the optional arguments are omitted."""
    vega = vega_weighted_pointmap(
        1900, 1410, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#2DEF4A"], [2, 5], [1, 10], 0.5, "EPSG:3857", "sum").build()
    vega_dict = json.loads(vega)
    enter = vega_dict["marks"][0]["encode"]["enter"]

    assert vega_dict["width"] == 1900
    assert vega_dict["height"] == 1410

    box = enter["bounding_box"]["value"]
    assert box[0] == -73.998427
    assert box[1] == 40.730309
    assert box[2] == -73.954348
    assert box[3] == 40.780816

    assert enter["color_gradient"]["value"][0] == "#2DEF4A"

    color_bound = enter["color_bound"]["value"]
    assert color_bound[0] == 2
    assert color_bound[1] == 5

    size_bound = enter["size_bound"]["value"]
    assert size_bound[0] == 1
    assert size_bound[1] == 10

    assert enter["opacity"]["value"] == 0.5
    assert enter["coordinate_system"]["value"] == "EPSG:3857"
    assert enter["aggregation_type"]["value"] == "sum"

    # Optional arguments omitted -> library defaults.
    vega = vega_weighted_pointmap(
        1900, 1410, [-73.998427, 40.730309, -73.954348, 40.780816],
        ["#2DEF4A"]).build()
    enter = json.loads(vega)["marks"][0]["encode"]["enter"]
    assert enter["color_bound"]["value"][0] == 0
    assert enter["color_bound"]["value"][1] == 0
    assert enter["size_bound"]["value"][0] == 3
    assert enter["opacity"]["value"] == 1.0
    assert enter["coordinate_system"]["value"] == "EPSG:3857"
    assert enter["aggregation_type"]["value"] == "max"
def weighted_pointmap(ax, points, color_weights=None, size_weights=None,
                      bounding_box=None,
                      color_gradient=["#115f9a", "#d0f400"],
                      color_bound=[0, 0], size_bound=[3], opacity=1.0,
                      coordinate_system='EPSG:3857',
                      **extra_contextily_params):
    """
    Plot a weighted pointmap onto a Matplotlib axes, over a contextily basemap.

    :type ax: AxesSubplot
    :param ax: Matplotlib axes object on which to add the basemap.
    :type points: Series(dtype: object)
    :param points: Points in WKB form
    :type color_weights: Series(dtype: float|int64)
    :param color_weights: Weights for point color, default None
    :type size_weights: Series(dtype: float|int64)
    :param size_weights: Weights for point size, default None
    :type bounding_box: (float, float, float, float)
    :param bounding_box: The bounding rectangle, as a [left, upper, right, lower]-tuple.
                         value should be of :coordinate_system:
    :type color_gradient: list
    :param color_gradient: Either ["hex_color"] for a single color, or
                           ["hex_color1", "hex_color2"] for a gradient,
                           default ["#115f9a", "#d0f400"]
    :type color_bound: list
    :param color_bound: Weight range [w1, w2] bound to color_gradient,
                        default [0, 0]
    :type size_bound: list
    :param size_bound: Range [w1, w2] bounding size_weights, default [3]
    :type opacity: float
    :param opacity: opacity of point
    :type coordinate_system: str
    :param coordinate_system: either 'EPSG:4326' or 'EPSG:3857'
    :type extra_contextily_params: dict
    :param extra_contextily_params: extra parameters for contextily.add_basemap.
                                    See https://contextily.readthedocs.io/en/latest/reference.html
    """
    from matplotlib import pyplot as plt
    import contextily as cx
    bbox = _transform_bbox(bounding_box, coordinate_system, 'epsg:3857')
    # Derive a render resolution matching the bbox aspect ratio.
    w, h = _get_recom_size(bbox[2] - bbox[0], bbox[3] - bbox[1])
    vega = vega_weighted_pointmap(w, h, bounding_box=bounding_box, color_gradient=color_gradient, color_bound=color_bound, size_bound=size_bound, opacity=opacity, coordinate_system=coordinate_system)
    hexstr = arctern.weighted_point_map_layer(vega, points, color_weights=color_weights, size_weights=size_weights)
    # The layer comes back as base64-encoded PNG bytes; decode and rasterize.
    f = io.BytesIO(base64.b64decode(hexstr))
    img = plt.imread(f)
    ax.set(xlim=(bbox[0], bbox[2]), ylim=(bbox[1], bbox[3]))
    cx.add_basemap(ax, **extra_contextily_params)
    ax.imshow(img, alpha=img[:, :, 3], extent=(bbox[0], bbox[2], bbox[1], bbox[3]))
    # Hide ticks and frame, consistent with the sibling plot helpers.
    ax.axis('off')
def weighted_pointmap(ax, points, color_weights=None, size_weights=None,
                      bounding_box=None,
                      color_gradient=["#115f9a", "#d0f400"],
                      color_bound=[0, 0], size_bound=[3], opacity=1.0,
                      coordinate_system='EPSG:3857',
                      **extra_contextily_params):
    """
    Plot weighted pointmap in Matplotlib

    :type ax: AxesSubplot
    :param ax: Matplotlib axes object on which to add the basemap.
    :type points: GeoSeries
    :param points: Sequence of Points
    :type color_weights: Series(dtype: float|int64)
    :param color_weights: Weights for point color, default as None
    :type size_weights: Series(dtype: float|int64)
    :param size_weights: Weights for point size, deciding diameter of point
                         (after bounded by size_bound). Default as None
    :type bounding_box: list
    :param bounding_box: Specify the bounding rectangle [west, south, east, north],
                         Default as None
    :type color_gradient: list
    :param color_gradient: Specify range of color gradient.
                           Either use ["hex_color"] to specify a same color for all points,
                           or ["hex_color1", "hex_color2"] to specify a color gradient
                           ranging from "hex_color1" to "hex_color2".
                           Default as ["#115f9a", "#d0f400"]
    :type color_bound: list
    :param color_bound: Specify weight range [w1, w2] binding to color_gradient.
                        Needed only when color_gradient has two value ["color1", "color2"].
                        Bind w1 to "color1", and w2 to "color2".
                        When weight < w1 or weight > w2, truncate to w1/w2 accordingly.
                        Default as [0, 0]
    :type size_bound: list
    :param size_bound: Specify range [w1, w2] of size_weights.
                       When weight < w1 or weight > w2, truncate to w1/w2 accordingly.
                       Default as [3]
    :type opacity: float
    :param opacity: Opacity of point, ranged from 0.0 to 1.0, default as 1.0
    :type coordinate_system: str
    :param coordinate_system: Coordinate Reference System of the geometry objects.
                              Must be SRID formed, e.g. 'EPSG:4326' or 'EPSG:3857'.
                              Default as 'EPSG:3857'
    :type extra_contextily_params: dict
    :param extra_contextily_params: Extra parameters will be passed to contextily.add_basemap.
                                    See https://contextily.readthedocs.io/en/latest/reference.html for details

    :example:
    >>> import pandas as pd
    >>> import numpy as np
    >>> import arctern
    >>> import matplotlib.pyplot as plt
    >>> # read from test.csv
    >>> # Download link: https://raw.githubusercontent.com/arctern-io/arctern-resources/benchmarks/benchmarks/dataset/layer_rendering_test_data/test_data.csv
    >>> df = pd.read_csv("/path/to/test_data.csv", dtype={'longitude':np.float64, 'latitude':np.float64, 'color_weights':np.float64, 'size_weights':np.float64, 'region_boundaries':np.object})
    >>> points = arctern.GeoSeries.point(df['longitude'], df['latitude'])
    >>>
    >>> # plot weighted pointmap with variable color and fixed size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, color_weights=df['color_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#115f9a", "#d0f400"], color_bound=[2.5,15], size_bound=[16], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    >>>
    >>> # plot weighted pointmap with fixed color and variable size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, size_weights=df['size_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#37A2DA"], size_bound=[15, 50], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    >>>
    >>> # plot weighted pointmap with variable color and size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, color_weights=df['color_weights'], size_weights=df['size_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#115f9a", "#d0f400"], color_bound=[2.5,15], size_bound=[15, 50], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    """
    from matplotlib import pyplot as plt
    import contextily as cx
    # Re-project the bounding box into web-mercator for the basemap extent.
    bbox = _transform_bbox(bounding_box, coordinate_system, 'epsg:3857')
    # Pick a render resolution matching the bbox aspect ratio.
    w, h = _get_recom_size(bbox[2] - bbox[0], bbox[3] - bbox[1])
    vega = vega_weighted_pointmap(w, h, bounding_box=bounding_box, color_gradient=color_gradient, color_bound=color_bound, size_bound=size_bound, opacity=opacity, coordinate_system=coordinate_system)
    hexstr = arctern.weighted_point_map_layer(vega, points, color_weights=color_weights, size_weights=size_weights)
    # The rendered layer is returned as base64-encoded PNG bytes.
    f = io.BytesIO(base64.b64decode(hexstr))
    img = plt.imread(f)
    ax.set(xlim=(bbox[0], bbox[2]), ylim=(bbox[1], bbox[3]))
    cx.add_basemap(ax, **extra_contextily_params)
    # Use the PNG's own alpha channel so the basemap shows through.
    ax.imshow(img, alpha=img[:, :, 3], extent=(bbox[0], bbox[2], bbox[1], bbox[3]))
    ax.axis('off')
def test_weighted_point_map():
    """Exercise the raw x/y weighted_point_map API for all four
    color/size weighting combinations, writing each PNG to /tmp."""
    arr_x = pandas.Series([10, 20, 30, 40, 50])
    arr_y = pandas.Series([10, 20, 30, 40, 50])
    arr_c = pandas.Series([1, 2, 3, 4, 5])
    arr_s = pandas.Series([2, 4, 6, 8, 10])

    # single color, single stroke width
    vega1 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#87CEEB", [1, 5], [5], 1.0, "EPSG:3857")
    res1 = arctern.weighted_point_map(arr_x, arr_y,
                                      vega1.build().encode('utf-8'))
    save_png(res1, "/tmp/test_weighted_0_0.png")

    # color gradient, single stroke width
    vega2 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        "blue_to_red", [1, 5], [5], 1.0, "EPSG:3857")
    res2 = arctern.weighted_point_map(arr_x, arr_y,
                                      vega2.build().encode('utf-8'),
                                      cs=arr_c)
    save_png(res2, "/tmp/test_weighted_1_0.png")

    # single color, variable stroke width
    vega3 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        "#87CEEB", [1, 5], [1, 10], 1.0, "EPSG:3857")
    res3 = arctern.weighted_point_map(arr_x, arr_y,
                                      vega3.build().encode('utf-8'),
                                      ss=arr_s)
    save_png(res3, "/tmp/test_weighted_0_1.png")

    # color gradient, variable stroke width
    vega4 = vega_weighted_pointmap(
        300, 200, [-73.998427, 40.730309, -73.954348, 40.780816],
        "blue_to_red", [1, 5], [1, 10], 1.0, "EPSG:3857")
    res4 = arctern.weighted_point_map(arr_x, arr_y,
                                      vega4.build().encode('utf-8'),
                                      cs=arr_c, ss=arr_s)
    save_png(res4, "/tmp/test_weighted_1_1.png")
def db_query():
    """Handle POST /db/query.

    Expects a JSON body of the form::

        {"id": <db id>, "query": {"type": <type>, "sql": <sql>, "params": {...}}}

    For type 'sql' the query result rows are returned as parsed JSON.
    For the render types ('point', 'heat', 'choropleth', 'weighted', 'icon')
    the result set is passed through the matching vega builder and renderer,
    and the rendered output is returned. Responds with an error payload for
    malformed requests, unknown db ids, or unsupported query types.
    """
    log.INSTANCE.info('POST /db/query: {}'.format(request.json))

    # Validate required top-level and nested keys before touching them.
    if not utils.check_json(request.json, 'id') \
            or not utils.check_json(request.json, 'query') \
            or not utils.check_json(request.json['query'], 'type') \
            or not utils.check_json(request.json['query'], 'sql'):
        return jsonify(status='error', code=-1, message='query format error')

    query_sql = request.json['query']['sql']
    query_type = request.json['query']['type']

    content = {}
    content['sql'] = query_sql
    content['err'] = False

    # Look up the target database instance registered under this id.
    db_instance = db.CENTER.get(str(request.json['id']), None)
    if db_instance is None:
        return jsonify(status="error",
                       code=-1,
                       message='there is no database whose id equal to ' +
                       str(request.json['id']))

    if query_type == 'sql':
        # Plain SQL: each result row arrives as a JSON string; parse them all.
        res = db_instance.run_for_json(query_sql)
        data = []
        for row in res:
            obj = json.loads(row)
            data.append(obj)
        content['result'] = data
    else:
        # Render types additionally require a 'params' object.
        if not utils.check_json(request.json['query'], 'params'):
            return jsonify(status='error',
                           code=-1,
                           message='query format error')
        query_params = request.json['query']['params']
        res = db_instance.run(query_sql)
        # Dispatch on the render type; each branch builds the matching vega
        # spec from query_params and renders the result set with it.
        if query_type == 'point':
            vega = vega_pointmap(int(query_params['width']),
                                 int(query_params['height']),
                                 query_params['point']['bounding_box'],
                                 int(query_params['point']['point_size']),
                                 query_params['point']['point_color'],
                                 float(query_params['point']['opacity']),
                                 query_params['point']['coordinate_system'])
            data = pointmap(vega, res)
            content['result'] = data
        elif query_type == 'heat':
            vega = vega_heatmap(int(query_params['width']),
                                int(query_params['height']),
                                query_params['heat']['bounding_box'],
                                float(query_params['heat']['map_zoom_level']),
                                query_params['heat']['coordinate_system'],
                                query_params['heat']['aggregation_type'])
            data = heatmap(vega, res)
            content['result'] = data
        elif query_type == 'choropleth':
            vega = vega_choroplethmap(
                int(query_params['width']), int(query_params['height']),
                query_params['choropleth']['bounding_box'],
                query_params['choropleth']['color_gradient'],
                query_params['choropleth']['color_bound'],
                float(query_params['choropleth']['opacity']),
                query_params['choropleth']['coordinate_system'],
                query_params['choropleth']['aggregation_type'])
            data = choroplethmap(vega, res)
            content['result'] = data
        elif query_type == 'weighted':
            vega = vega_weighted_pointmap(
                int(query_params['width']), int(query_params['height']),
                query_params['weighted']['bounding_box'],
                query_params['weighted']['color_gradient'],
                query_params['weighted']['color_bound'],
                query_params['weighted']['size_bound'],
                float(query_params['weighted']['opacity']),
                query_params['weighted']['coordinate_system'])
            data = weighted_pointmap(vega, res)
            content['result'] = data
        elif query_type == 'icon':
            vega = vega_icon(int(query_params['width']),
                             int(query_params['height']),
                             query_params['icon']['bounding_box'],
                             query_params['icon']['icon_path'],
                             query_params['icon']['coordinate_system'])
            data = icon_viz(vega, res)
            content['result'] = data
        else:
            return jsonify(status="error",
                           code=-1,
                           message='{} not support'.format(query_type))

    return jsonify(status="success", code=200, data=content)
vega = vega_pointmap(1024, 384, bounding_box=[pos1[0], pos1[1], pos2[0], pos2[1]], point_size=10, point_color="#2DEF4A", opacity=1, coordinate_system="EPSG:4326") png = point_map_layer( vega, ST_Point(pickup_df.pickup_longitude, pickup_df.pickup_latitude)) save_png(png, '/tmp/arctern_pointmap_pandas.png') vega = vega_weighted_pointmap( 1024, 384, bounding_box=[pos1[0], pos1[1], pos2[0], pos2[1]], color_gradient=["#115f9a", "#d0f400"], color_bound=[1, 50], size_bound=[3, 15], opacity=1.0, coordinate_system="EPSG:4326") png = weighted_point_map_layer(vega, ST_Point(pickup_df.pickup_longitude, pickup_df.pickup_latitude), color_weights=df.head(limit_num).fare_amount, size_weights=df.head(limit_num).total_amount) save_png(png, "/tmp/arctern_weighted_pointmap_pandas.png") vega = vega_heatmap(1024, 384, bounding_box=[pos1[0], pos1[1], pos2[0], pos2[1]], map_zoom_level=13.0,
def draw_china_weighted_point_map(spark):
    """Render three weighted point maps of China COVID-19 city data
    (plain points, color-weighted, color+size-weighted) and save them as
    PNGs in the current directory.

    :param spark: active SparkSession used to load the CSV at ``china_csv``
                  and run the ST_* SQL queries registered by register_funcs.

    Bug fix: views created by createOrReplaceTempView are *session-local*,
    so they must be dropped with dropTempView; the original
    dropGlobalTempView calls were silent no-ops.
    """
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "continent string, country string, province string, provinceLocationId string, "
        "provinceCurrentConfirmedCount int , provinceConfirmedCount int, provinceSuspectedCount int,"
        "provinceCuredCount int, provinceDeadCount int, cityName string, longitude double, latitude double,"
        "cityLocationId string, cityCurrentConfirmedCount int, cityConfirmedCount int, citySuspectedCount int,"
        "cityCuredCount int, cityDeadCount int, updateTime timestamp").load(
            china_csv).cache()
    # Clean up any leftover view from a previous run before re-registering.
    spark.catalog.dropTempView("COVID_china")
    df.createOrReplaceTempView("COVID_china")
    register_funcs(spark)
    # 1
    res1 = spark.sql(
        "select ST_Point(longitude, latitude) as point from COVID_china where ST_Within(ST_Point(longitude, latitude), 'POLYGON ((71.604264 17.258977, 137.319408 17.258977, 137.319408 53.808533, 71.604264 53.808533, 71.604264 17.258977))')"
    )
    res1.createOrReplaceTempView("res1")
    # Filter out rows whose coordinates failed to parse.
    res1 = spark.sql("select * from res1 where point != 'POINT (nan nan)' ")
    vega1 = vega_weighted_pointmap(
        1024, 896, [71.604264, 17.258977, 137.319408, 53.808533],
        "#EEEEEE", [2, 60], [6], 1.0, "EPSG:4326")
    res_png1 = weighted_pointmap(res1, vega1)
    save_png(res_png1, './COVID_china_weighted_point_map1.png')
    # 2
    res2 = spark.sql(
        "select ST_Point(longitude, latitude) as point, provinceConfirmedCount as c from COVID_china "
        "where ST_Within(ST_Point(longitude, latitude), "
        "'POLYGON ((71.604264 17.258977, 137.319408 17.258977, 137.319408 53.808533,"
        " 71.604264 53.808533, 71.604264 17.258977))')")
    res2.createOrReplaceTempView("res2")
    res2 = spark.sql("select * from res2 where point != 'POINT (nan nan)' ")
    vega2 = vega_weighted_pointmap(
        1024, 896, [71.604264, 17.258977, 137.319408, 53.808533],
        "blue_to_red", [2, 1000], [6], 1.0, "EPSG:4326")
    res_png2 = weighted_pointmap(res2, vega2)
    save_png(res_png2, './COVID_china_weighted_point_map2.png')
    # 3
    res3 = spark.sql(
        "select ST_Point(longitude, latitude) as point, provinceConfirmedCount as c, "
        "provinceConfirmedCount as s from COVID_china "
        "where ST_Within(ST_Point(longitude, latitude), "
        "'POLYGON ((71.604264 17.258977, 137.319408 17.258977, 137.319408 53.808533,"
        " 71.604264 53.808533, 71.604264 17.258977))')")
    res3.createOrReplaceTempView("res3")
    res3 = spark.sql("select * from res3 where point != 'POINT (nan nan)' ")
    vega3 = vega_weighted_pointmap(
        3000, 2000, [71.604264, 17.258977, 137.319408, 53.808533],
        "blue_to_red", [2, 1000], [5, 1000], 1.0, "EPSG:4326")
    res_png3 = weighted_pointmap(res3, vega3)
    save_png(res_png3, './COVID_china_weighted_point_map3.png')
    # createOrReplaceTempView registers a session-local view, so it must be
    # dropped with dropTempView; dropGlobalTempView would be a silent no-op.
    spark.catalog.dropTempView("COVID_china")
def weighted_pointmap(ax, points, color_weights=None, size_weights=None,
                      bounding_box=None,
                      color_gradient=["#115f9a", "#d0f400"],
                      color_bound=[0, 0], size_bound=[3], opacity=1.0,
                      coordinate_system='EPSG:3857',
                      **extra_contextily_params):
    """
    Plots a weighted point map in Matplotlib.

    Parameters
    ----------
    ax : matplotlib.axes.Axes
        Axes where geometries will be plotted.
    points : GeoSeries
        Sequence of points.
    color_weights : Series, optional
        Weights of point color.
    size_weights : Series, optional
        Weights of point size.
    bounding_box : list
        Bounding box of the map. For example, [west, south, east, north].
    color_gradient : list, optional
        Range of color gradient, by default ["#115f9a", "#d0f400"].
        Either use ["hex_color"] to specify a same color for all geometries,
        or ["hex_color1", "hex_color2"] to specify a color gradient ranging
        from "hex_color1" to "hex_color2".
    color_bound : list, optional
        Weight range [w1, w2] of ``color_gradient``, by default [0, 0].
        Needed only when ``color_gradient`` has two values
        ["color1", "color2"]. Binds w1 to "color1", and w2 to "color2".
        When weight < w1 or weight > w2, the weight will be truncated to
        w1 or w2 accordingly.
    size_bound : list, optional
        Weight range [w1, w2] of ``size_weights``, by default [3].
        When weight < w1 or weight > w2, the weight will be truncated to
        w1 or w2 accordingly.
    opacity : float, optional
        Opacity of points, ranged from 0.0 to 1.0, by default 1.0.
    coordinate_system : str, optional
        The Coordinate Reference System (CRS) set to all geometries,
        by default 'EPSG:3857'. Only supports SRID as a WKT representation
        of CRS by now.
    **extra_contextily_params: dict
        Extra parameters passed to `contextily.add_basemap.
        <https://contextily.readthedocs.io/en/latest/reference.html>`_

    Examples
    -------
    >>> import pandas as pd
    >>> import numpy as np
    >>> import arctern
    >>> import matplotlib.pyplot as plt
    >>> # read from test.csv
    >>> # Download link: https://raw.githubusercontent.com/arctern-io/arctern-resources/benchmarks/benchmarks/dataset/layer_rendering_test_data/test_data.csv
    >>> df = pd.read_csv("/path/to/test_data.csv", dtype={'longitude':np.float64, 'latitude':np.float64, 'color_weights':np.float64, 'size_weights':np.float64, 'region_boundaries':np.object})
    >>> points = arctern.GeoSeries.point(df['longitude'], df['latitude'])
    >>>
    >>> # plot weighted pointmap with variable color and fixed size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, color_weights=df['color_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#115f9a", "#d0f400"], color_bound=[2.5,15], size_bound=[16], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    >>>
    >>> # plot weighted pointmap with fixed color and variable size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, size_weights=df['size_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#37A2DA"], size_bound=[15, 50], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    >>>
    >>> # plot weighted pointmap with variable color and size
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.weighted_pointmap(ax, points, color_weights=df['color_weights'], size_weights=df['size_weights'], bounding_box=[-73.99668712186558,40.72972339069935,-73.99045479584949,40.7345193345495], color_gradient=["#115f9a", "#d0f400"], color_bound=[2.5,15], size_bound=[15, 50], opacity=1.0, coordinate_system="EPSG:4326")
    >>> plt.show()
    """
    from matplotlib import pyplot as plt
    import contextily as cx
    # Re-project the bounding box into web-mercator for the basemap extent.
    bbox = _transform_bbox(bounding_box, coordinate_system, 'epsg:3857')
    # Pick a render resolution matching the bbox aspect ratio.
    w, h = _get_recom_size(bbox[2] - bbox[0], bbox[3] - bbox[1])
    vega = vega_weighted_pointmap(w, h, bounding_box=bounding_box, color_gradient=color_gradient, color_bound=color_bound, size_bound=size_bound, opacity=opacity, coordinate_system=coordinate_system)
    hexstr = arctern.weighted_point_map_layer(vega, points, color_weights=color_weights, size_weights=size_weights)
    # The rendered layer is returned as base64-encoded PNG bytes.
    f = io.BytesIO(base64.b64decode(hexstr))
    img = plt.imread(f)
    ax.set(xlim=(bbox[0], bbox[2]), ylim=(bbox[1], bbox[3]))
    cx.add_basemap(ax, **extra_contextily_params)
    # Use the PNG's own alpha channel so the basemap shows through.
    ax.imshow(img, alpha=img[:, :, 3], extent=(bbox[0], bbox[2], bbox[1], bbox[3]))
    ax.axis('off')