def test_vega_pointmap():
    """Verify vega_pointmap serializes every point-map option into the
    expected Vega JSON, and that coordinate_system defaults to EPSG:4326
    when omitted."""
    vega = vega_pointmap(1900, 1410,
                         [-73.998427, 40.730309, -73.954348, 40.780816],
                         3, "#2DEF4A", 0.5, "EPSG:3857").build()
    vega_dict = json.loads(vega)
    assert vega_dict["width"] == 1900
    assert vega_dict["height"] == 1410
    # Every per-mark option lives under marks[0].encode.enter; hoist the
    # lookup once instead of repeating the four-level indexing per assert.
    enter = vega_dict["marks"][0]["encode"]["enter"]
    assert enter["bounding_box"]["value"] == [-73.998427, 40.730309,
                                              -73.954348, 40.780816]
    assert enter["shape"]["value"] == "circle"
    assert enter["point_color"]["value"] == "#2DEF4A"
    assert enter["point_size"]["value"] == 3
    assert enter["opacity"]["value"] == 0.5
    assert enter["coordinate_system"]["value"] == "EPSG:3857"

    # Omitting coordinate_system must fall back to the EPSG:4326 default.
    vega = vega_pointmap(1900, 1410,
                         [-73.998427, 40.730309, -73.954348, 40.780816],
                         3, "#2DEF4A", 0.5).build()
    vega_dict = json.loads(vega)
    enter = vega_dict["marks"][0]["encode"]["enter"]
    assert enter["coordinate_system"]["value"] == "EPSG:4326"
def test_point_map():
    """Render four hand-picked NYC coordinates through point_map_layer and
    save the resulting PNG."""
    longitudes = pandas.Series([-73.96524, -73.96118, -73.97324, -73.98456])
    latitudes = pandas.Series([40.73747, 40.74507, 40.75890, 40.77654])
    points = arctern.ST_Point(longitudes, latitudes)
    vega = vega_pointmap(
        1024,
        896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        point_size=10,
        point_color="#0000FF",
        opacity=1.0,
        coordinate_system="EPSG:4326")
    curve_z1 = arctern.point_map_layer(vega, points)
    save_png(curve_z1, "/tmp/test_curve_z1.png")
def test_point_map():
    """Draw three synthetic segments (y=150, y=x-50, y=50) over x in
    [100, 200) as a point map and save the PNG."""
    xs = list(range(100, 200))
    arr_x = pandas.Series(xs * 3)
    arr_y = pandas.Series([150] * len(xs)            # y = 150
                          + [i - 50 for i in xs]     # y = x - 50
                          + [50] * len(xs))          # y = 50
    # NOTE(review): "EPSG:43" looks like a truncated "EPSG:4326" — confirm
    # whether an invalid CRS is intentional here.
    vega_circle2d = vega_pointmap(
        1024, 896,
        [-73.998427, 40.730309, -73.954348, 40.780816],
        3, "#2DEF4A", 0.5, "EPSG:43")
    vega_json = vega_circle2d.build()
    curve_z1 = arctern.point_map(arr_x, arr_y, vega_json.encode('utf-8'))
    save_png(curve_z1, "/tmp/test_curve_z1.png")
def draw_point_map(spark):
    """Select NYC taxi pickups inside a Manhattan polygon, render them as a
    point map, save /tmp/pointmap.png, and report elapsed wall time."""
    start_time = time.time()
    # file 0_5M_nyc_taxi_and_building.csv could be obtained from arctern-turoial warehouse under zilliztech account. The link on github is https://github.com/zilliztech/arctern-tutorial
    csv_schema = "VendorID string, tpep_pickup_datetime timestamp, tpep_dropoff_datetime timestamp, passenger_count long, trip_distance double, pickup_longitude double, pickup_latitude double, dropoff_longitude double, dropoff_latitude double, fare_amount double, tip_amount double, total_amount double, buildingid_pickup long, buildingid_dropoff long, buildingtext_pickup string, buildingtext_dropoff string"
    taxi_df = (spark.read.format("csv")
               .option("header", True)
               .option("delimiter", ",")
               .schema(csv_schema)
               .load("file:///tmp/0_5M_nyc_taxi_and_building.csv")
               .cache())
    taxi_df.createOrReplaceTempView("nyc_taxi")
    register_funcs(spark)
    pickup_sql = "select ST_Point(pickup_longitude, pickup_latitude) as point from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), ST_GeomFromText('POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))'))"
    pickup_points = spark.sql(pickup_sql)
    vega = vega_pointmap(
        1024,
        896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        point_size=3,
        point_color="#2DEF4A",
        opacity=0.5,
        coordinate_system="EPSG:4326")
    rendered = pointmap(vega, pickup_points)
    save_png(rendered, '/tmp/pointmap.png')
    spark.sql("show tables").show()
    spark.catalog.dropGlobalTempView("nyc_taxi")
    print("--- %s seconds ---" % (time.time() - start_time))
def test_point_map():
    """Build three synthetic segments and render them via point_map."""
    domain = list(range(100, 200))
    arr_x = pandas.Series(domain * 3)
    arr_y = pandas.Series([150] * len(domain)            # y = 150
                          + [i - 50 for i in domain]     # y = x - 50
                          + [50] * len(domain))          # y = 50
    points = arctern.ST_Point(arr_x, arr_y)
    vega = vega_pointmap(
        1024,
        896,
        bounding_box=[-73.998427, 40.730309, -73.954348, 40.780816],
        point_size=3,
        point_color="#2DEF4A",
        opacity=0.5,
        coordinate_system="EPSG:4326")
    curve_z1 = arctern.point_map(vega, points)
    save_png(curve_z1, "/tmp/test_curve_z1.png")
def test_point_map():
    """Same three-segment fixture, exercising the positional
    vega_pointmap API."""
    xs = list(range(100, 200))
    ys = [150] * len(xs) + [x - 50 for x in xs] + [50] * len(xs)
    points = arctern.ST_Point(pandas.Series(xs * 3), pandas.Series(ys))
    vega = vega_pointmap(1024, 896,
                         [-73.998427, 40.730309, -73.954348, 40.780816],
                         3, "#2DEF4A", 0.5, "EPSG:4326")
    curve_z1 = arctern.point_map(vega, points)
    save_png(curve_z1, "/tmp/test_curve_z1.png")
def plot_pointmap(ax, points, bounding_box,
                  coordinate_system='EPSG:4326',
                  point_size=3,
                  point_color='red',
                  point_opacity=1.0,
                  **extra_contextily_params):
    """
    Plot *points* as a raster layer over a contextily basemap on *ax*.

    :type ax: AxesSubplot
    :param ax: Matplotlib axes object on which to add the basemap.
    :type points: Series(dtype: object)
    :param points: Points in WKB form
    :type bounding_box: (float, float, float, float)
    :param bounding_box: The bounding rectangle, as a
                         [left, upper, right, lower]-tuple. Values should be
                         expressed in :coordinate_system:.
    :type coordinate_system: str
    :param coordinate_system: either 'EPSG:4326' or 'EPSG:3857'
    :type point_size: int
    :param point_size: size of point
    :type point_color: str
    :param point_color: specify color, using matplotlib.colors
    :type point_opacity: float
    :param point_opacity: opacity of point
    :type extra_contextily_params: dict
    :param extra_contextily_params: extra parameters for
        contextily.add_basemap.
        See https://contextily.readthedocs.io/en/latest/reference.html
    """
    from matplotlib import colors, pyplot as plt
    import contextily as cx
    # Render in web-mercator regardless of the input CRS; the recommended
    # pixel size preserves the bbox aspect ratio.
    bbox = _transform_bbox(bounding_box, coordinate_system, 'epsg:3857')
    color_hex = colors.to_hex(point_color)
    w, h = _get_recom_size(bbox[2] - bbox[0], bbox[3] - bbox[1])
    vega = vega_pointmap(w,
                         h,
                         bounding_box=bounding_box,
                         point_size=point_size,
                         point_color=color_hex,
                         opacity=point_opacity,
                         coordinate_system=coordinate_system)
    # point_map_layer returns base64-encoded PNG bytes; decode and load.
    hexstr = arctern.point_map_layer(vega, points)
    f = io.BytesIO(base64.b64decode(hexstr))
    img = plt.imread(f)
    ax.set(xlim=(bbox[0], bbox[2]), ylim=(bbox[1], bbox[3]))
    cx.add_basemap(ax, **extra_contextily_params)
    # Use the PNG alpha channel so the basemap shows through empty pixels.
    ax.imshow(img,
              alpha=img[:, :, 3],
              extent=(bbox[0], bbox[2], bbox[1], bbox[3]))
def draw_point_map(spark):
    """Render NYC taxi pickup points inside a fixed Manhattan polygon and
    save the result to /tmp/pointmap.png (Spark pipeline)."""
    # file 0_5M_nyc_build.csv is generated from New York taxi data and taxi zone shapefile. Data is available at the following URL: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page
    df = spark.read.format("csv").option("header", True).option(
        "delimiter", ","
    ).schema(
        "VendorID string, tpep_pickup_datetime timestamp, tpep_dropoff_datetime timestamp, passenger_count long, trip_distance double, pickup_longitude double, pickup_latitude double, dropoff_longitude double, dropoff_latitude double, fare_amount double, tip_amount double, total_amount double, buildingid_pickup long, buildingid_dropoff long, buildingtext_pickup string, buildingtext_dropoff string"
    ).load(data_path).cache()
    df.show(20, False)
    df.createOrReplaceTempView("nyc_taxi")
    # Register arctern's spatial UDFs (ST_Point, ST_Within, ...) on this session.
    register_funcs(spark)
    res = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')"
    )
    vega = vega_pointmap(1024, 896,
                         [-73.998427, 40.730309, -73.954348, 40.780816],
                         3, "#2DEF4A", 0.5, "EPSG:4326")
    # NOTE(review): this module's pointmap takes (dataframe, vega) — the
    # opposite order of the pandas-side API; confirm against the import.
    res = pointmap(res, vega)
    save_png(res, '/tmp/pointmap.png')
    spark.sql("show tables").show()
    spark.catalog.dropGlobalTempView("nyc_taxi")
def pointmap(ax, points, bounding_box,
             point_size=3,
             point_color='#115f9a',
             opacity=1.0,
             coordinate_system='EPSG:3857',
             **extra_contextily_params):
    """
    Plot pointmap in Matplotlib

    :type ax: AxesSubplot
    :param ax: Matplotlib axes object on which to add the basemap.
    :type points: GeoSeries
    :param points: Sequence of Points
    :type bounding_box: list
    :param bounding_box: Specify the bounding rectangle
                         [west, south, east, north].
    :type point_size: int
    :param point_size: Diameter of point, default as 3
    :type point_color: str
    :param point_color: Specify point color in Hex Color Code,
                        default as "#115f9a"
    :type opacity: float
    :param opacity: Opacity of point, ranged from 0.0 to 1.0, default as 1.0
    :type coordinate_system: str
    :param coordinate_system: Coordinate Reference System of the geometry
        objects. Must be SRID formed, e.g. 'EPSG:4326' or 'EPSG:3857'.
        Default as 'EPSG:3857'
    :type extra_contextily_params: dict
    :param extra_contextily_params: Extra parameters will be passed to
        contextily.add_basemap.
        See https://contextily.readthedocs.io/en/latest/reference.html
        for details

    :example:
    >>> import pandas as pd
    >>> import numpy as np
    >>> import arctern
    >>> import matplotlib.pyplot as plt
    >>> # read from test.csv
    >>> # Download link: https://raw.githubusercontent.com/arctern-io/arctern-resources/benchmarks/benchmarks/dataset/layer_rendering_test_data/test_data.csv
    >>> df = pd.read_csv("/path/to/test_data.csv", dtype={'longitude':np.float64, 'latitude':np.float64, 'color_weights':np.float64, 'size_weights':np.float64, 'region_boundaries':np.object})
    >>> points = arctern.GeoSeries.point(df['longitude'], df['latitude'])
    >>> # plot pointmap
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.pointmap(ax, points, [-74.01398981737215,40.71353244267465,-73.96979949831308,40.74480271529791], point_size=10, point_color='#115f9a',coordinate_system="EPSG:4326")
    >>> plt.show()
    """
    from matplotlib import pyplot as plt
    import contextily as cx
    # Convert the bbox to web-mercator for rendering; pick a pixel size
    # that keeps the bbox aspect ratio.
    bbox = _transform_bbox(bounding_box, coordinate_system, 'epsg:3857')
    w, h = _get_recom_size(bbox[2] - bbox[0], bbox[3] - bbox[1])
    vega = vega_pointmap(w,
                         h,
                         bounding_box=bounding_box,
                         point_size=point_size,
                         point_color=point_color,
                         opacity=opacity,
                         coordinate_system=coordinate_system)
    # point_map_layer returns base64-encoded PNG bytes; decode and load.
    hexstr = arctern.point_map_layer(vega, points)
    f = io.BytesIO(base64.b64decode(hexstr))
    img = plt.imread(f)
    ax.set(xlim=(bbox[0], bbox[2]), ylim=(bbox[1], bbox[3]))
    cx.add_basemap(ax, **extra_contextily_params)
    # The PNG alpha channel lets the basemap show through empty pixels.
    ax.imshow(img,
              alpha=img[:, :, 3],
              extent=(bbox[0], bbox[2], bbox[1], bbox[3]))
    ax.axis('off')
def db_query():
    """
    /db/query handler

    Expects a JSON body {"id": ..., "query": {"type": ..., "sql": ...,
    "params": {...}}} and dispatches on query type: raw 'sql', or one of
    the rendering types ('point', 'heat', 'choropleth', 'weighted', 'icon').
    """
    log.INSTANCE.info('POST /db/query: {}'.format(request.json))

    # Validate the request envelope before touching any field.
    if not utils.check_json(request.json, 'id') \
            or not utils.check_json(request.json, 'query') \
            or not utils.check_json(request.json['query'], 'type') \
            or not utils.check_json(request.json['query'], 'sql'):
        return jsonify(status='error', code=-1, message='query format error')

    query_sql = request.json['query']['sql']
    query_type = request.json['query']['type']

    content = {}
    content['sql'] = query_sql
    content['err'] = False

    # Resolve the target database instance by its registry id.
    db_instance = db.CENTER.get(str(request.json['id']), None)
    if db_instance is None:
        return jsonify(status="error",
                       code=-1,
                       message='there is no database whose id equal to ' +
                       str(request.json['id']))

    if query_type == 'sql':
        # Plain SQL: each row comes back as a JSON string; parse them all.
        res = db_instance.run_for_json(query_sql)
        data = []
        for row in res:
            obj = json.loads(row)
            data.append(obj)
        content['result'] = data
    else:
        # Rendering queries additionally require a 'params' object.
        if not utils.check_json(request.json['query'], 'params'):
            return jsonify(status='error', code=-1,
                           message='query format error')
        query_params = request.json['query']['params']

        res = db_instance.run(query_sql)

        if query_type == 'point':
            vega = vega_pointmap(int(query_params['width']),
                                 int(query_params['height']),
                                 query_params['point']['bounding_box'],
                                 int(query_params['point']['point_size']),
                                 query_params['point']['point_color'],
                                 float(query_params['point']['opacity']),
                                 query_params['point']['coordinate_system'])
            data = pointmap(vega, res)
            content['result'] = data
        elif query_type == 'heat':
            vega = vega_heatmap(int(query_params['width']),
                                int(query_params['height']),
                                query_params['heat']['bounding_box'],
                                float(query_params['heat']['map_zoom_level']),
                                query_params['heat']['coordinate_system'],
                                query_params['heat']['aggregation_type'])
            data = heatmap(vega, res)
            content['result'] = data
        elif query_type == 'choropleth':
            vega = vega_choroplethmap(
                int(query_params['width']),
                int(query_params['height']),
                query_params['choropleth']['bounding_box'],
                query_params['choropleth']['color_gradient'],
                query_params['choropleth']['color_bound'],
                float(query_params['choropleth']['opacity']),
                query_params['choropleth']['coordinate_system'],
                query_params['choropleth']['aggregation_type'])
            data = choroplethmap(vega, res)
            content['result'] = data
        elif query_type == 'weighted':
            vega = vega_weighted_pointmap(
                int(query_params['width']),
                int(query_params['height']),
                query_params['weighted']['bounding_box'],
                query_params['weighted']['color_gradient'],
                query_params['weighted']['color_bound'],
                query_params['weighted']['size_bound'],
                float(query_params['weighted']['opacity']),
                query_params['weighted']['coordinate_system'])
            data = weighted_pointmap(vega, res)
            content['result'] = data
        elif query_type == 'icon':
            vega = vega_icon(int(query_params['width']),
                             int(query_params['height']),
                             query_params['icon']['bounding_box'],
                             query_params['icon']['icon_path'],
                             query_params['icon']['coordinate_system'])
            data = icon_viz(vega, res)
            content['result'] = data
        else:
            # Unknown rendering type.
            return jsonify(status="error",
                           code=-1,
                           message='{} not support'.format(query_type))

    return jsonify(status="success", code=200, data=content)
ST_AsText(ST_Point(pickup_df.pickup_longitude, pickup_df.pickup_latitude)).head() ST_AsText( ST_Transform( ST_Point(pickup_df.pickup_longitude, pickup_df.pickup_latitude), 'epsg:4326', 'epsg:3857')).head() from arctern.util import save_png from arctern.util.vega import vega_pointmap, vega_weighted_pointmap, vega_heatmap, vega_choroplethmap, vega_icon, vega_fishnetmap vega = vega_pointmap(1024, 384, bounding_box=[pos1[0], pos1[1], pos2[0], pos2[1]], point_size=10, point_color="#2DEF4A", opacity=1, coordinate_system="EPSG:4326") png = point_map_layer( vega, ST_Point(pickup_df.pickup_longitude, pickup_df.pickup_latitude)) save_png(png, '/tmp/arctern_pointmap_pandas.png') vega = vega_weighted_pointmap( 1024, 384, bounding_box=[pos1[0], pos1[1], pos2[0], pos2[1]], color_gradient=["#115f9a", "#d0f400"], color_bound=[1, 50], size_bound=[3, 15], opacity=1.0,
def pointmap(ax, points, bounding_box,
             point_size=3, point_color='#115f9a', opacity=1.0,
             coordinate_system='EPSG:3857', **extra_contextily_params):
    """
    Plots a point map in Matplotlib.

    Parameters
    ----------
    ax : matplotlib.axes.Axes
        Axes where geometries will be plotted.
    points : GeoSeries
        Sequence of points.
    bounding_box : list
        Bounding box of the map. For example, [west, south, east, north].
    point_size : int, optional
        Diameter of points, by default 3.
    point_color : str, optional
        Point color in Hex Color Code, by default '#115f9a'.
    opacity : float, optional
        Opacity of points, ranged from 0.0 to 1.0, by default 1.0.
    coordinate_system : str, optional
        The Coordinate Reference System (CRS) set to all geometries,
        by default 'EPSG:3857'. Only supports SRID as a WKT representation
        of CRS by now.
    **extra_contextily_params: dict
        Extra parameters passed to `contextily.add_basemap.
        <https://contextily.readthedocs.io/en/latest/reference.html>`_

    Examples
    -------
    >>> import pandas as pd
    >>> import numpy as np
    >>> import arctern
    >>> import matplotlib.pyplot as plt
    >>> # read from test.csv
    >>> # Download link: https://raw.githubusercontent.com/arctern-io/arctern-resources/benchmarks/benchmarks/dataset/layer_rendering_test_data/test_data.csv
    >>> df = pd.read_csv("/path/to/test_data.csv", dtype={'longitude':np.float64, 'latitude':np.float64, 'color_weights':np.float64, 'size_weights':np.float64, 'region_boundaries':np.object})
    >>> points = arctern.GeoSeries.point(df['longitude'], df['latitude'])
    >>> # plot pointmap
    >>> fig, ax = plt.subplots(figsize=(10, 6), dpi=200)
    >>> arctern.plot.pointmap(ax, points, [-74.01398981737215,40.71353244267465,-73.96979949831308,40.74480271529791], point_size=10, point_color='#115f9a',coordinate_system="EPSG:4326")
    >>> plt.show()
    """
    from matplotlib import pyplot as plt
    import contextily as cx
    # Render in web-mercator; unpack the transformed bbox into named coords.
    west, south, east, north = _transform_bbox(
        bounding_box, coordinate_system, 'epsg:3857')
    width, height = _get_recom_size(east - west, north - south)
    vega = vega_pointmap(width,
                         height,
                         bounding_box=bounding_box,
                         point_size=point_size,
                         point_color=point_color,
                         opacity=opacity,
                         coordinate_system=coordinate_system)
    # The layer renderer yields base64-encoded PNG bytes.
    encoded_png = arctern.point_map_layer(vega, points)
    image = plt.imread(io.BytesIO(base64.b64decode(encoded_png)))
    ax.set(xlim=(west, east), ylim=(south, north))
    cx.add_basemap(ax, **extra_contextily_params)
    # Use the PNG alpha channel so the basemap shows through empty pixels.
    ax.imshow(image,
              alpha=image[:, :, 3],
              extent=(west, east, south, north))
    ax.axis('off')
def db_query():
    """
    /db/query handler

    Expects a JSON body {"id": ..., "query": {"type": ..., "sql": ...,
    "params": {...}}} and dispatches on query type: raw 'sql', or a
    rendering type ('point', 'heat', 'choropleth') executed on Spark.
    """
    # Validate the request envelope before touching any field.
    if not utils.check_json(request.json, 'id') \
            or not utils.check_json(request.json, 'query') \
            or not utils.check_json(request.json['query'], 'type') \
            or not utils.check_json(request.json['query'], 'sql'):
        return jsonify(status='error', code=-1, message='query format error')

    query_sql = request.json['query']['sql']
    query_type = request.json['query']['type']

    content = {}
    content['sql'] = query_sql
    content['err'] = False

    if query_type == 'sql':
        # Plain SQL: each row comes back as a JSON string; parse them all.
        res = spark.Spark.run_for_json(query_sql)
        data = []
        for row in res:
            obj = json.loads(row)
            data.append(obj)
        content['result'] = data
    else:
        # Rendering queries additionally require a 'params' object.
        if not utils.check_json(request.json['query'], 'params'):
            return jsonify(status='error', code=-1,
                           message='query format error')
        query_params = request.json['query']['params']

        res = spark.Spark.run(query_sql)

        # NOTE(review): this older handler uses legacy param names
        # (stroke_width/stroke/coordinate, map_scale, color_style/rule)
        # and passes (res, vega) — opposite of the newer handler's order.
        if query_type == 'point':
            vega = vega_pointmap(int(query_params['width']),
                                 int(query_params['height']),
                                 query_params['point']['bounding_box'],
                                 int(query_params['point']['stroke_width']),
                                 query_params['point']['stroke'],
                                 float(query_params['point']['opacity']),
                                 query_params['point']['coordinate'])
            data = pointmap(res, vega)
            content['result'] = data
        elif query_type == 'heat':
            vega = vega_heatmap(int(query_params['width']),
                                int(query_params['height']),
                                float(query_params['heat']['map_scale']),
                                query_params['heat']['bounding_box'],
                                query_params['heat']['coordinate'])
            data = heatmap(res, vega)
            content['result'] = data
        elif query_type == 'choropleth':
            vega = vega_choroplethmap(
                int(query_params['width']),
                int(query_params['height']),
                query_params['choropleth']['bounding_box'],
                query_params['choropleth']['color_style'],
                query_params['choropleth']['rule'],
                float(query_params['choropleth']['opacity']),
                query_params['choropleth']['coordinate'])
            data = choroplethmap(res, vega)
            content['result'] = data
        else:
            # Unknown rendering type.
            return jsonify(status="error",
                           code=-1,
                           message='{} not support'.format(query_type))

    return jsonify(status="success", code=200, data=content)
def run_test_point_map(spark):
    """Regression test: render the same pickup-point query under six
    point-map configurations, three times each (one baseline + two test
    renderings), then diff every test PNG against its baseline.

    The six configurations vary canvas size, point size, color and opacity;
    file names match the original fixtures (point_map_nyc_N.png and
    test_point_map_nyc_N-1/-2.png under png_path).
    """
    # file 0_5M_nyc_taxi_and_building.csv could be obtained from arctern-turoial warehouse under zilliztech account. The link on github is https://github.com/zilliztech/arctern-tutorial
    # file 0_10000_nyc_taxi_and_building.csv is from file 0_5M_nyc_taxi_and_building.csv first 10000 lines
    df = spark.read.format("csv").option("header", True).option("delimiter", ",").schema(
        "VendorID string, tpep_pickup_datetime timestamp, tpep_dropoff_datetime timestamp, passenger_count long, "
        "trip_distance double, pickup_longitude double, pickup_latitude double, dropoff_longitude double, "
        "dropoff_latitude double, fare_amount double, tip_amount double, total_amount double, buildingid_pickup long, "
        "buildingid_dropoff long, buildingtext_pickup string, buildingtext_dropoff string").load(
            file_path).cache()
    df.createOrReplaceTempView("nyc_taxi")
    register_funcs(spark)
    res = spark.sql(
        "select ST_Point(pickup_longitude, pickup_latitude) as point from nyc_taxi where ST_Within(ST_Point(pickup_longitude, pickup_latitude), 'POLYGON ((-73.998427 40.730309, -73.954348 40.730309, -73.954348 40.780816 ,-73.998427 40.780816, -73.998427 40.730309))')")

    bbox = [-73.998427, 40.730309, -73.954348, 40.780816]
    # (width, height, point_size, color, opacity) per case, in original order:
    cases = [
        (1024, 896, 3, "#2DEF4A", 0.5),   # 1: green
        (1024, 896, 5, "#F50404", 0.5),   # 2: red
        (1024, 896, 5, "#1455EE", 0.5),   # 3: blue
        (1024, 896, 3, "#2DEF4A", 1.0),   # 4: fully opaque
        (1024, 896, 3, "#2DEF4A", 0.0),   # 5: fully transparent
        (200, 200, 3, "#2DEF4A", 0.5),    # 6: small canvas
    ]

    baseline_pngs = []
    for idx, (width, height, size, color, alpha) in enumerate(cases, start=1):
        vega = vega_pointmap(width, height, bbox, size, color, alpha,
                             "EPSG:4326")
        # Baseline plus two independent renderings of the same config; the
        # renderer must be deterministic, so all three should be identical.
        baseline_png = png_path + "point_map_nyc_%d.png" % idx
        save_png(pointmap(res, vega), baseline_png)
        save_png(pointmap(res, vega),
                 png_path + "test_point_map_nyc_%d-1.png" % idx)
        save_png(pointmap(res, vega),
                 png_path + "test_point_map_nyc_%d-2.png" % idx)
        baseline_pngs.append(baseline_png)

    spark.catalog.dropGlobalTempView("nyc_taxi")

    # Compare every test rendering against its baseline (after cleanup,
    # matching the original control flow).
    for idx, baseline_png in enumerate(baseline_pngs, start=1):
        assert run_diff_png(baseline_png,
                            png_path + "test_point_map_nyc_%d-1.png" % idx)
        assert run_diff_png(baseline_png,
                            png_path + "test_point_map_nyc_%d-2.png" % idx)