def load_world_bank_health_n_pop(  # pylint: disable=too-many-locals, too-many-statements
    only_metadata: bool = False,
    force: bool = False,
    sample: bool = False,
) -> None:
    """Loads the world bank health dataset, slices and a dashboard.

    :param only_metadata: only (re)create the table/slice/dashboard metadata,
        skip loading the table data
    :param force: reload the table data even if the table already exists
    :param sample: load only the first 100 rows of the dataset
    """
    tbl_name = "wb_health_population"
    database = superset.utils.database.get_example_database()
    engine = database.get_sqla_engine()
    schema = inspect(engine).default_schema_name
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        data = get_example_data("countries.json.gz")
        pdf = pd.read_json(data)
        # source column names contain "." (e.g. "SP.POP.TOTL"); normalize to "_"
        pdf.columns = [col.replace(".", "_") for col in pdf.columns]
        pdf.year = pd.to_datetime(pdf.year)
        if database.backend == "presto":
            # Presto gets the timestamp serialized as a string (see the String
            # dtype below).
            # BUG FIX: the format string was "%Y-%m-%d %H:%M%:%S" — the stray
            # "%" between minutes and seconds produced malformed timestamps.
            pdf.year = pdf.year.dt.strftime("%Y-%m-%d %H:%M:%S")
        pdf = pdf.head(100) if sample else pdf
        pdf.to_sql(
            tbl_name,
            engine,
            schema=schema,
            if_exists="replace",
            chunksize=50,
            dtype={
                # TODO(bkyryliuk): use TIMESTAMP type for presto
                "year": DateTime if database.backend != "presto" else String(255),
                "country_code": String(3),
                "country_name": String(255),
                "region": String(255),
            },
            method="multi",
            index=False,
        )

    print("Creating table [wb_health_population] reference")
    table = get_table_connector_registry()
    tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = table(table_name=tbl_name, schema=schema)
    tbl.description = utils.readfile(
        os.path.join(get_examples_folder(), "countries.md")
    )
    tbl.main_dttm_col = "year"
    tbl.database = database
    tbl.filter_select_enabled = True

    # metric names encode "<aggr>__<column>", e.g. "sum__SP_POP_TOTL".
    # NOTE: the original list contained "sum__SH_DYN_AIDS" twice; the duplicate
    # was a no-op (the any() guard below skips already-present metrics) and has
    # been removed.
    metrics = [
        "sum__SP_POP_TOTL",
        "sum__SH_DYN_AIDS",
        "sum__SP_RUR_TOTL_ZS",
        "sum__SP_DYN_LE00_IN",
        "sum__SP_RUR_TOTL",
    ]
    for metric in metrics:
        if not any(col.metric_name == metric for col in tbl.metrics):
            aggr_func = metric[:3]
            # quote/escape the column name for the target dialect
            col = str(column(metric[5:]).compile(db.engine))
            tbl.metrics.append(
                SqlMetric(metric_name=metric, expression=f"{aggr_func}({col})")
            )

    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    slices = create_slices(tbl)
    # the last slice is only shown on the "misc" dashboard
    misc_dash_slices.add(slices[-1].slice_name)
    for slc in slices:
        merge_slice(slc)

    print("Creating a World's Health Bank dashboard")
    dash_name = "World Bank's Data"
    slug = "world_health"
    dash = db.session.query(Dashboard).filter_by(slug=slug).first()

    if not dash:
        dash = Dashboard()
    dash.published = True
    pos = dashboard_positions
    update_slice_ids(pos, slices)

    dash.dashboard_title = dash_name
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = slug
    # exclude the misc-dashboard-only slice from this dashboard
    dash.slices = slices[:-1]
    db.session.merge(dash)
    db.session.commit()
def create_dashboard(slices: List[Slice]) -> Dashboard:
    """Create (or update) the "births" example dashboard wired to ``slices``.

    Reuses an existing dashboard with slug "births" when present; otherwise a
    new Dashboard owned/created by the admin user is added to the session.
    Returns the committed Dashboard.
    """
    print("Creating a dashboard")

    admin = get_admin_user()
    dash = db.session.query(Dashboard).filter_by(slug="births").first()

    if not dash:
        dash = Dashboard()
        dash.owners = [admin]
        dash.created_by = admin
        db.session.add(dash)

    dash.published = True
    # fixed label colors so gender series render consistently across charts
    dash.json_metadata = textwrap.dedent("""\
{ "label_colors": { "Girls": "#FF69B4", "Boys": "#ADD8E6", "girl": "#FF69B4", "boy": "#ADD8E6" } }""")  # pylint: disable=line-too-long
    # dashboard-v2 position layout; chartIds are remapped by update_slice_ids
    pos = json.loads(
        textwrap.dedent("""\
{ "CHART-6GdlekVise": { "children": [], "id": "CHART-6GdlekVise", "meta": { "chartId": 5547, "height": 50, "sliceName": "Top 10 Girl Name Share", "width": 5 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-eh0w37bWbR" ], "type": "CHART" }, "CHART-6n9jxb30JG": { "children": [], "id": "CHART-6n9jxb30JG", "meta": { "chartId": 5540, "height": 36, "sliceName": "Genders by State", "width": 5 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW--EyBZQlDi" ], "type": "CHART" }, "CHART-Jj9qh1ol-N": { "children": [], "id": "CHART-Jj9qh1ol-N", "meta": { "chartId": 5545, "height": 50, "sliceName": "Boy Name Cloud", "width": 4 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-kzWtcvo8R1" ], "type": "CHART" }, "CHART-ODvantb_bF": { "children": [], "id": "CHART-ODvantb_bF", "meta": { "chartId": 5548, "height": 50, "sliceName": "Top 10 Boy Name Share", "width": 5 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-kzWtcvo8R1" ], "type": "CHART" }, "CHART-PAXUUqwmX9": { "children": [], "id": "CHART-PAXUUqwmX9", "meta": { "chartId": 5538, "height": 34, "sliceName": "Genders", "width": 3 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-2n0XgiHDgs" ], "type": "CHART" }, "CHART-_T6n_K9iQN": { "children": [], "id": "CHART-_T6n_K9iQN", "meta": { "chartId": 5539, "height": 36, "sliceName": "Trends", "width": 7 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW--EyBZQlDi" ], "type": "CHART" }, "CHART-eNY0tcE_ic": { "children": [], "id": "CHART-eNY0tcE_ic", "meta": { "chartId": 5537, "height": 34, "sliceName": "Participants", "width": 3 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-2n0XgiHDgs" ], "type": "CHART" }, "CHART-g075mMgyYb": { "children": [], "id": "CHART-g075mMgyYb", "meta": { "chartId": 5541, "height": 50, "sliceName": "Girls", "width": 3 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-eh0w37bWbR" ], "type": "CHART" }, "CHART-n-zGGE6S1y": { "children": [], "id": "CHART-n-zGGE6S1y", "meta": { "chartId": 5542, "height": 50, "sliceName": "Girl Name Cloud", "width": 4 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-eh0w37bWbR" ], "type": "CHART" }, "CHART-vJIPjmcbD3": { "children": [], "id": "CHART-vJIPjmcbD3", "meta": { "chartId": 5543, "height": 50, "sliceName": "Boys", "width": 3 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-kzWtcvo8R1" ], "type": "CHART" }, "DASHBOARD_VERSION_KEY": "v2", "GRID_ID": { "children": [ "ROW-2n0XgiHDgs", "ROW--EyBZQlDi", "ROW-eh0w37bWbR", "ROW-kzWtcvo8R1" ], "id": "GRID_ID", "parents": [ "ROOT_ID" ], "type": "GRID" }, "HEADER_ID": { "id": "HEADER_ID", "meta": { "text": "Births" }, "type": "HEADER" }, "MARKDOWN-zaflB60tbC": { "children": [], "id": "MARKDOWN-zaflB60tbC", "meta": { "code": "<div style=\\"text-align:center\\"> <h1>Birth Names Dashboard</h1> <img src=\\"/static/assets/images/babies.png\\" style=\\"width:50%;\\"></div>", "height": 34, "width": 6 }, "parents": [ "ROOT_ID", "GRID_ID", "ROW-2n0XgiHDgs" ], "type": "MARKDOWN" }, "ROOT_ID": { "children": [ "GRID_ID" ], "id": "ROOT_ID", "type": "ROOT" }, "ROW--EyBZQlDi": { "children": [ "CHART-_T6n_K9iQN", "CHART-6n9jxb30JG" ], "id": "ROW--EyBZQlDi", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "GRID_ID" ], "type": "ROW" }, "ROW-2n0XgiHDgs": { "children": [ "CHART-eNY0tcE_ic", "MARKDOWN-zaflB60tbC", "CHART-PAXUUqwmX9" ], "id": "ROW-2n0XgiHDgs", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "GRID_ID" ], "type": "ROW" }, "ROW-eh0w37bWbR": { "children": [ "CHART-g075mMgyYb", "CHART-n-zGGE6S1y", "CHART-6GdlekVise" ], "id": "ROW-eh0w37bWbR", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "GRID_ID" ], "type": "ROW" }, "ROW-kzWtcvo8R1": { "children": [ "CHART-vJIPjmcbD3", "CHART-Jj9qh1ol-N", "CHART-ODvantb_bF" ], "id": "ROW-kzWtcvo8R1", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "GRID_ID" ], "type": "ROW" } }
"""))  # pylint: enable=line-too-long
    # dashboard v2 doesn't allow add markup slice
    dash.slices = [slc for slc in slices if slc.viz_type != "markup"]
    update_slice_ids(pos, dash.slices)
    dash.dashboard_title = "USA Births Names"
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = "births"

    db.session.commit()
    return dash
def export(self, **kwargs: Any) -> Response:
    """Export dashboards
    ---
    get:
      description: >-
        Exports multiple Dashboards and downloads them as YAML files.
      parameters:
      - in: query
        name: q
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/get_export_ids_schema'
      responses:
        200:
          description: Dashboard export
          content:
            text/plain:
              schema:
                type: string
        400:
          $ref: '#/components/responses/400'
        401:
          $ref: '#/components/responses/401'
        404:
          $ref: '#/components/responses/404'
        422:
          $ref: '#/components/responses/422'
        500:
          $ref: '#/components/responses/500'
    """
    # ids to export, decoded from the rison-encoded "q" query argument
    requested_ids = kwargs["rison"]

    if is_feature_enabled("VERSIONED_EXPORT"):
        # new-style export: a zip bundle of per-object YAML files, built
        # fully in memory
        timestamp = datetime.now().strftime("%Y%m%dT%H%M%S")
        root = f"dashboard_export_{timestamp}"
        filename = f"{root}.zip"

        buf = BytesIO()
        with ZipFile(buf, "w") as bundle:
            try:
                for file_name, file_content in ExportDashboardsCommand(
                    requested_ids
                ).run():
                    with bundle.open(f"{root}/{file_name}", "w") as fp:
                        fp.write(file_content.encode())
            except DashboardNotFoundError:
                return self.response_404()
        buf.seek(0)

        return send_file(
            buf,
            mimetype="application/zip",
            as_attachment=True,
            attachment_filename=filename,
        )

    # legacy export: a single JSON document; base filters restrict the result
    # to dashboards the current user may access
    query = self.datamodel.session.query(Dashboard).filter(
        Dashboard.id.in_(requested_ids)
    )
    query = self._base_filters.apply_all(query)
    ids = [item.id for item in query.all()]
    if not ids:
        return self.response_404()
    export = Dashboard.export_dashboards(ids)
    resp = make_response(export, 200)
    resp.headers["Content-Disposition"] = generate_download_headers("json")[
        "Content-Disposition"
    ]
    return resp
def set_dash_metadata( dashboard: Dashboard, data: Dict[Any, Any], old_to_new_slice_ids: Optional[Dict[int, int]] = None, ) -> None: positions = data["positions"] # find slices in the position data slice_ids = [ value.get("meta", {}).get("chartId") for value in positions.values() if isinstance(value, dict) ] session = db.session() current_slices = session.query(Slice).filter(Slice.id.in_(slice_ids)).all() dashboard.slices = current_slices # remove leading and trailing white spaces in the dumped json dashboard.position_json = json.dumps( positions, indent=None, separators=(",", ":"), sort_keys=True ) md = dashboard.params_dict dashboard.css = data.get("css") dashboard.dashboard_title = data["dashboard_title"] if "timed_refresh_immune_slices" not in md: md["timed_refresh_immune_slices"] = [] new_filter_scopes = {} if "filter_scopes" in data: # replace filter_id and immune ids from old slice id to new slice id: # and remove slice ids that are not in dash anymore slc_id_dict: Dict[int, int] = {} if old_to_new_slice_ids: slc_id_dict = { old: new for old, new in old_to_new_slice_ids.items() if new in slice_ids } else: slc_id_dict = {sid: sid for sid in slice_ids} new_filter_scopes = copy_filter_scopes( old_to_new_slc_id_dict=slc_id_dict, old_filter_scopes=json.loads(data["filter_scopes"] or "{}"), ) if new_filter_scopes: md["filter_scopes"] = new_filter_scopes else: md.pop("filter_scopes", None) md["expanded_slices"] = data.get("expanded_slices", {}) md["refresh_frequency"] = data.get("refresh_frequency", 0) default_filters_data = json.loads(data.get("default_filters", "{}")) applicable_filters = { key: v for key, v in default_filters_data.items() if int(key) in slice_ids } md["default_filters"] = json.dumps(applicable_filters) md["color_scheme"] = data.get("color_scheme") if data.get("color_namespace"): md["color_namespace"] = data.get("color_namespace") if data.get("label_colors"): md["label_colors"] = data.get("label_colors") dashboard.json_metadata = json.dumps(md)
def load_tabbed_dashboard(_: bool = False) -> None:
    """Creating a tabbed dashboard"""
    print("Creating a dashboard with nested tabs")
    slug = "tabbed_dash"
    dash = db.session.query(Dashboard).filter_by(slug=slug).first()

    if not dash:
        dash = Dashboard()

    # reuse charts in "World's Bank Data and create
    # new dashboard with nested tabs
    tabbed_dash_slices = set()
    tabbed_dash_slices.add("Region Filter")
    tabbed_dash_slices.add("Growth Rate")
    tabbed_dash_slices.add("Treemap")
    tabbed_dash_slices.add("Box plot")

    # dashboard-v2 position layout with nested TABS/TAB components;
    # chartIds are remapped by update_slice_ids below
    js = textwrap.dedent("""\
{ "CHART-c0EjR-OZ0n": { "children": [], "id": "CHART-c0EjR-OZ0n", "meta": { "chartId": 870, "height": 50, "sliceName": "Box plot", "width": 4 }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "ROW-7G2o5uDvfo" ], "type": "CHART" }, "CHART-dxV7Il74hH": { "children": [], "id": "CHART-dxV7Il74hH", "meta": { "chartId": 797, "height": 50, "sliceName": "Treemap", "width": 4 }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-gcQJxApOZS", "TABS-afnrUvdxYF", "TAB-jNNd4WWar1", "ROW-7ygtDczaQ" ], "type": "CHART" }, "CHART-jJ5Yj1Ptaz": { "children": [], "id": "CHART-jJ5Yj1Ptaz", "meta": { "chartId": 789, "height": 50, "sliceName": "World's Population", "width": 4 }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj", "TAB-z81Q87PD7", "ROW-G73z9PIHn" ], "type": "CHART" }, "CHART-z4gmEuCqQ5": { "children": [], "id": "CHART-z4gmEuCqQ5", "meta": { "chartId": 788, "height": 50, "sliceName": "Region Filter", "width": 4 }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj", "TAB-EcNm_wh922", "ROW-LCjsdSetJ" ], "type": "CHART" }, "DASHBOARD_VERSION_KEY": "v2", "GRID_ID": { "children": [], "id": "GRID_ID", "type": "GRID" }, "HEADER_ID": { "id": "HEADER_ID", "meta": { "text": "Tabbed Dashboard" }, "type": "HEADER" }, "ROOT_ID": { "children": [ "TABS-lV0r00f4H1" ], "id": "ROOT_ID", "type": "ROOT" }, "ROW-7G2o5uDvfo": { "children": [ "CHART-c0EjR-OZ0n" ], "id": "ROW-7G2o5uDvfo", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS" ], "type": "ROW" }, "ROW-7ygtDczaQ": { "children": [ "CHART-dxV7Il74hH" ], "id": "ROW-7ygtDczaQ", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-gcQJxApOZS", "TABS-afnrUvdxYF", "TAB-jNNd4WWar1" ], "type": "ROW" }, "ROW-G73z9PIHn": { "children": [ "CHART-jJ5Yj1Ptaz" ], "id": "ROW-G73z9PIHn", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj", "TAB-z81Q87PD7" ], "type": "ROW" }, "ROW-LCjsdSetJ": { "children": [ "CHART-z4gmEuCqQ5" ], "id": "ROW-LCjsdSetJ", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj", "TAB-EcNm_wh922" ], "type": "ROW" }, "TAB-EcNm_wh922": { "children": [ "ROW-LCjsdSetJ" ], "id": "TAB-EcNm_wh922", "meta": { "text": "row tab 1" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj" ], "type": "TAB" }, "TAB-NF3dlrWGS": { "children": [ "ROW-7G2o5uDvfo", "TABS-CSjo6VfNrj" ], "id": "TAB-NF3dlrWGS", "meta": { "text": "Tab A" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1" ], "type": "TAB" }, "TAB-gcQJxApOZS": { "children": [ "TABS-afnrUvdxYF" ], "id": "TAB-gcQJxApOZS", "meta": { "text": "Tab B" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1" ], "type": "TAB" }, "TAB-jNNd4WWar1": { "children": [ "ROW-7ygtDczaQ" ], "id": "TAB-jNNd4WWar1", "meta": { "text": "New Tab" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-gcQJxApOZS", "TABS-afnrUvdxYF" ], "type": "TAB" }, "TAB-z81Q87PD7": { "children": [ "ROW-G73z9PIHn" ], "id": "TAB-z81Q87PD7", "meta": { "text": "row tab 2" }, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS", "TABS-CSjo6VfNrj" ], "type": "TAB" }, "TABS-CSjo6VfNrj": { "children": [ "TAB-EcNm_wh922", "TAB-z81Q87PD7" ], "id": "TABS-CSjo6VfNrj", "meta": {}, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS" ], "type": "TABS" }, "TABS-afnrUvdxYF": { "children": [ "TAB-jNNd4WWar1" ], "id": "TABS-afnrUvdxYF", "meta": {}, "parents": [ "ROOT_ID", "TABS-lV0r00f4H1", "TAB-gcQJxApOZS" ], "type": "TABS" }, "TABS-lV0r00f4H1": { "children": [ "TAB-NF3dlrWGS", "TAB-gcQJxApOZS" ], "id": "TABS-lV0r00f4H1", "meta": {}, "parents": [ "ROOT_ID" ], "type": "TABS" } }
""")
    pos = json.loads(js)
    slices = [
        db.session.query(Slice).filter_by(slice_name=name).first()
        for name in tabbed_dash_slices
    ]
    # sort by id for a deterministic order (set iteration order is not)
    slices = sorted(slices, key=lambda x: x.id)
    update_slice_ids(pos, slices)
    dash.position_json = json.dumps(pos, indent=4)
    dash.slices = slices
    dash.dashboard_title = "Tabbed Dashboard"
    dash.slug = slug

    db.session.merge(dash)
    db.session.commit()
def load_world_bank_health_n_pop(
    only_metadata=False, force=False
):  # pylint: disable=too-many-locals
    """Loads the world bank health dataset, slices and a dashboard

    :param only_metadata: only (re)create metadata, skip loading table data
    :param force: reload the table data even if the table already exists
    """
    tbl_name = "wb_health_population"
    database = utils.get_example_database()
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        data = get_example_data("countries.json.gz")
        pdf = pd.read_json(data)
        # source column names contain "." (e.g. "SP.POP.TOTL"); normalize to "_"
        pdf.columns = [col.replace(".", "_") for col in pdf.columns]
        pdf.year = pd.to_datetime(pdf.year)
        pdf.to_sql(
            tbl_name,
            database.get_sqla_engine(),
            if_exists="replace",
            chunksize=50,
            dtype={
                "year": DateTime(),
                "country_code": String(3),
                "country_name": String(255),
                "region": String(255),
            },
            index=False,
        )

    print("Creating table [wb_health_population] reference")
    tbl = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not tbl:
        tbl = TBL(table_name=tbl_name)
    tbl.description = utils.readfile(os.path.join(EXAMPLES_FOLDER, "countries.md"))
    tbl.main_dttm_col = "year"
    tbl.database = database
    tbl.filter_select_enabled = True

    # metric names encode "<aggr>__<column>", e.g. "sum__SP_POP_TOTL"
    # NOTE(review): "sum__SH_DYN_AIDS" is listed twice; the duplicate is a
    # no-op because the any() guard below skips already-present metrics.
    metrics = [
        "sum__SP_POP_TOTL",
        "sum__SH_DYN_AIDS",
        "sum__SH_DYN_AIDS",
        "sum__SP_RUR_TOTL_ZS",
        "sum__SP_DYN_LE00_IN",
        "sum__SP_RUR_TOTL",
    ]
    for metric in metrics:
        if not any(col.metric_name == metric for col in tbl.metrics):
            aggr_func = metric[:3]
            # quote/escape the column name for the target dialect
            col = str(column(metric[5:]).compile(db.engine))
            tbl.metrics.append(
                SqlMetric(metric_name=metric, expression=f"{aggr_func}({col})")
            )

    db.session.merge(tbl)
    db.session.commit()
    tbl.fetch_metadata()

    metric = "sum__SP_POP_TOTL"
    metrics = ["sum__SP_POP_TOTL"]
    secondary_metric = {
        "aggregate": "SUM",
        "column": {
            "column_name": "SP_RUR_TOTL",
            "optionName": "_col_SP_RUR_TOTL",
            "type": "DOUBLE",
        },
        "expressionType": "SIMPLE",
        "hasCustomLabel": True,
        "label": "Rural Population",
    }

    # common form-data defaults shared by all slices below
    defaults = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "limit": "25",
        "granularity_sqla": "year",
        "groupby": [],
        "row_limit": config["ROW_LIMIT"],
        "since": "2014-01-01",
        "until": "2014-01-02",
        "time_range": "2014-01-01 : 2014-01-02",
        "markup_type": "markdown",
        "country_fieldtype": "cca3",
        "entity": "country_code",
        "show_bubbles": True,
    }

    print("Creating slices")
    slices = [
        Slice(
            slice_name="Region Filter",
            viz_type="filter_box",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="filter_box",
                date_filter=False,
                filter_configs=[
                    {
                        "asc": False,
                        "clearable": True,
                        "column": "region",
                        "key": "2s98dfu",
                        "metric": "sum__SP_POP_TOTL",
                        "multiple": True,
                    },
                    {
                        "asc": False,
                        "clearable": True,
                        "key": "li3j2lk",
                        "column": "country_name",
                        "metric": "sum__SP_POP_TOTL",
                        "multiple": True,
                    },
                ],
            ),
        ),
        Slice(
            slice_name="World's Population",
            viz_type="big_number",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="2000",
                viz_type="big_number",
                compare_lag="10",
                metric="sum__SP_POP_TOTL",
                compare_suffix="over 10Y",
            ),
        ),
        Slice(
            slice_name="Most Populated Countries",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="table",
                metrics=["sum__SP_POP_TOTL"],
                groupby=["country_name"],
            ),
        ),
        Slice(
            slice_name="Growth Rate",
            viz_type="line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="line",
                since="1960-01-01",
                metrics=["sum__SP_POP_TOTL"],
                num_period_compare="10",
                groupby=["country_name"],
            ),
        ),
        Slice(
            slice_name="% Rural",
            viz_type="world_map",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="world_map",
                metric="sum__SP_RUR_TOTL_ZS",
                num_period_compare="10",
                secondary_metric=secondary_metric,
            ),
        ),
        Slice(
            slice_name="Life Expectancy VS Rural %",
            viz_type="bubble",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="bubble",
                since="2011-01-01",
                until="2011-01-02",
                series="region",
                limit=0,
                entity="country_name",
                x="sum__SP_RUR_TOTL_ZS",
                y="sum__SP_DYN_LE00_IN",
                size="sum__SP_POP_TOTL",
                max_bubble_size="50",
                # exclude small territories whose data skews the bubble chart
                adhoc_filters=[
                    {
                        "clause": "WHERE",
                        "expressionType": "SIMPLE",
                        "filterOptionName": "2745eae5",
                        "comparator": [
                            "TCA",
                            "MNP",
                            "DMA",
                            "MHL",
                            "MCO",
                            "SXM",
                            "CYM",
                            "TUV",
                            "IMY",
                            "KNA",
                            "ASM",
                            "ADO",
                            "AMA",
                            "PLW",
                        ],
                        "operator": "not in",
                        "subject": "country_code",
                    }
                ],
            ),
        ),
        Slice(
            slice_name="Rural Breakdown",
            viz_type="sunburst",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="sunburst",
                groupby=["region", "country_name"],
                since="2011-01-01",
                until="2011-01-01",
                metric=metric,
                secondary_metric=secondary_metric,
            ),
        ),
        Slice(
            slice_name="World's Pop Growth",
            viz_type="area",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type="area",
                groupby=["region"],
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Box plot",
            viz_type="box_plot",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                whisker_options="Min/max (no outliers)",
                x_ticks_layout="staggered",
                viz_type="box_plot",
                groupby=["region"],
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Treemap",
            viz_type="treemap",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="1960-01-01",
                until="now",
                viz_type="treemap",
                metrics=["sum__SP_POP_TOTL"],
                groupby=["region", "country_code"],
            ),
        ),
        Slice(
            slice_name="Parallel Coordinates",
            viz_type="para",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                since="2011-01-01",
                until="2011-01-01",
                viz_type="para",
                limit=100,
                metrics=["sum__SP_POP_TOTL", "sum__SP_RUR_TOTL_ZS", "sum__SH_DYN_AIDS"],
                secondary_metric="sum__SP_POP_TOTL",
                series="country_name",
            ),
        ),
    ]
    # the last slice is only shown on the "misc" dashboard
    misc_dash_slices.add(slices[-1].slice_name)
    for slc in slices:
        merge_slice(slc)

    print("Creating a World's Health Bank dashboard")
    dash_name = "World Bank's Data"
    slug = "world_health"
    dash = db.session.query(Dashboard).filter_by(slug=slug).first()

    if not dash:
        dash = Dashboard()
    dash.published = True
    # dashboard-v2 position layout; chartIds are remapped by update_slice_ids
    js = textwrap.dedent(
        """\
{ "CHART-36bfc934": { "children": [], "id": "CHART-36bfc934", "meta": { "chartId": 40, "height": 25, "sliceName": "Region Filter", "width": 2 }, "type": "CHART" }, "CHART-37982887": { "children": [], "id": "CHART-37982887", "meta": { "chartId": 41, "height": 25, "sliceName": "World's Population", "width": 2 }, "type": "CHART" }, "CHART-17e0f8d8": { "children": [], "id": "CHART-17e0f8d8", "meta": { "chartId": 42, "height": 92, "sliceName": "Most Populated Countries", "width": 3 }, "type": "CHART" }, "CHART-2ee52f30": { "children": [], "id": "CHART-2ee52f30", "meta": { "chartId": 43, "height": 38, "sliceName": "Growth Rate", "width": 6 }, "type": "CHART" }, "CHART-2d5b6871": { "children": [], "id": "CHART-2d5b6871", "meta": { "chartId": 44, "height": 52, "sliceName": "% Rural", "width": 7 }, "type": "CHART" }, "CHART-0fd0d252": { "children": [], "id": "CHART-0fd0d252", "meta": { "chartId": 45, "height": 50, "sliceName": "Life Expectancy VS Rural %", "width": 8 }, "type": "CHART" }, "CHART-97f4cb48": { "children": [], "id": "CHART-97f4cb48", "meta": { "chartId": 46, "height": 38, "sliceName": "Rural Breakdown", "width": 3 }, "type": "CHART" }, "CHART-b5e05d6f": { "children": [], "id": "CHART-b5e05d6f", "meta": { "chartId": 47, "height": 50, "sliceName": "World's Pop Growth", "width": 4 }, "type": "CHART" }, "CHART-e76e9f5f": { "children": [], "id": "CHART-e76e9f5f", "meta": { "chartId": 48, "height": 50, "sliceName": "Box plot", "width": 4 }, "type": "CHART" }, "CHART-a4808bba": { "children": [], "id": "CHART-a4808bba", "meta": { "chartId": 49, "height": 50, "sliceName": "Treemap", "width": 8 }, "type": "CHART" }, "COLUMN-071bbbad": { "children": [ "ROW-1e064e3c", "ROW-afdefba9" ], "id": "COLUMN-071bbbad", "meta": { "background": "BACKGROUND_TRANSPARENT", "width": 9 }, "type": "COLUMN" }, "COLUMN-fe3914b8": { "children": [ "CHART-36bfc934", "CHART-37982887" ], "id": "COLUMN-fe3914b8", "meta": { "background": "BACKGROUND_TRANSPARENT", "width": 2 }, "type": "COLUMN" }, "GRID_ID": { "children": [ "ROW-46632bc2", "ROW-3fa26c5d", "ROW-812b3f13" ], "id": "GRID_ID", "type": "GRID" }, "HEADER_ID": { "id": "HEADER_ID", "meta": { "text": "World's Bank Data" }, "type": "HEADER" }, "ROOT_ID": { "children": [ "GRID_ID" ], "id": "ROOT_ID", "type": "ROOT" }, "ROW-1e064e3c": { "children": [ "COLUMN-fe3914b8", "CHART-2d5b6871" ], "id": "ROW-1e064e3c", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-3fa26c5d": { "children": [ "CHART-b5e05d6f", "CHART-0fd0d252" ], "id": "ROW-3fa26c5d", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-46632bc2": { "children": [ "COLUMN-071bbbad", "CHART-17e0f8d8" ], "id": "ROW-46632bc2", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-812b3f13": { "children": [ "CHART-a4808bba", "CHART-e76e9f5f" ], "id": "ROW-812b3f13", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-afdefba9": { "children": [ "CHART-2ee52f30", "CHART-97f4cb48" ], "id": "ROW-afdefba9", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "DASHBOARD_VERSION_KEY": "v2" }
"""
    )
    pos = json.loads(js)
    update_slice_ids(pos, slices)

    dash.dashboard_title = dash_name
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = slug
    # exclude the misc-dashboard-only slice from this dashboard
    dash.slices = slices[:-1]
    db.session.merge(dash)
    db.session.commit()
def load_misc_dashboard() -> None:
    """Loading a dashboard featuring misc charts"""
    print("Creating the dashboard")

    db.session.expunge_all()
    dash = db.session.query(Dashboard).filter_by(slug=DASH_SLUG).first()

    if not dash:
        dash = Dashboard()

    # dashboard-v2 position layout; chartIds are remapped by update_slice_ids
    js = textwrap.dedent("""\
{ "CHART-BkeVbh8ANQ": { "children": [], "id": "CHART-BkeVbh8ANQ", "meta": { "chartId": 4004, "height": 34, "sliceName": "Multi Line", "width": 8 }, "type": "CHART" }, "CHART-H1HYNzEANX": { "children": [], "id": "CHART-H1HYNzEANX", "meta": { "chartId": 3940, "height": 50, "sliceName": "Energy Sankey", "width": 6 }, "type": "CHART" }, "CHART-HJOYVMV0E7": { "children": [], "id": "CHART-HJOYVMV0E7", "meta": { "chartId": 3969, "height": 63, "sliceName": "Mapbox Long/Lat", "width": 6 }, "type": "CHART" }, "CHART-S1WYNz4AVX": { "children": [], "id": "CHART-S1WYNz4AVX", "meta": { "chartId": 3989, "height": 25, "sliceName": "Parallel Coordinates", "width": 4 }, "type": "CHART" }, "CHART-r19KVMNCE7": { "children": [], "id": "CHART-r19KVMNCE7", "meta": { "chartId": 3971, "height": 34, "sliceName": "Calendar Heatmap multiformat 0", "width": 4 }, "type": "CHART" }, "CHART-rJ4K4GV04Q": { "children": [], "id": "CHART-rJ4K4GV04Q", "meta": { "chartId": 3941, "height": 63, "sliceName": "Energy Force Layout", "width": 6 }, "type": "CHART" }, "CHART-rkgF4G4A4X": { "children": [], "id": "CHART-rkgF4G4A4X", "meta": { "chartId": 3970, "height": 25, "sliceName": "Birth in France by department in 2016", "width": 8 }, "type": "CHART" }, "CHART-rywK4GVR4X": { "children": [], "id": "CHART-rywK4GVR4X", "meta": { "chartId": 3942, "height": 50, "sliceName": "Heatmap", "width": 6 }, "type": "CHART" }, "COLUMN-ByUFVf40EQ": { "children": [ "CHART-rywK4GVR4X", "CHART-HJOYVMV0E7" ], "id": "COLUMN-ByUFVf40EQ", "meta": { "background": "BACKGROUND_TRANSPARENT", "width": 6 }, "type": "COLUMN" }, "COLUMN-rkmYVGN04Q": { "children": [ "CHART-rJ4K4GV04Q", "CHART-H1HYNzEANX" ], "id": "COLUMN-rkmYVGN04Q", "meta": { "background": "BACKGROUND_TRANSPARENT", "width": 6 }, "type": "COLUMN" }, "GRID_ID": { "children": [ "ROW-SytNzNA4X", "ROW-S1MK4M4A4X", "ROW-HkFFEzVRVm" ], "id": "GRID_ID", "type": "GRID" }, "HEADER_ID": { "id": "HEADER_ID", "meta": { "text": "Misc Charts" }, "type": "HEADER" }, "ROOT_ID": { "children": [ "GRID_ID" ], "id": "ROOT_ID", "type": "ROOT" }, "ROW-HkFFEzVRVm": { "children": [ "CHART-r19KVMNCE7", "CHART-BkeVbh8ANQ" ], "id": "ROW-HkFFEzVRVm", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-S1MK4M4A4X": { "children": [ "COLUMN-rkmYVGN04Q", "COLUMN-ByUFVf40EQ" ], "id": "ROW-S1MK4M4A4X", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "ROW-SytNzNA4X": { "children": [ "CHART-rkgF4G4A4X", "CHART-S1WYNz4AVX" ], "id": "ROW-SytNzNA4X", "meta": { "background": "BACKGROUND_TRANSPARENT" }, "type": "ROW" }, "DASHBOARD_VERSION_KEY": "v2" }
""")
    pos = json.loads(js)
    # collect the slices registered by other example loaders
    slices = (
        db.session.query(Slice).filter(Slice.slice_name.in_(misc_dash_slices)).all()
    )
    # sort by id for a deterministic order (set iteration order is not)
    slices = sorted(slices, key=lambda x: x.id)
    update_slice_ids(pos, slices)
    dash.dashboard_title = "Misc Charts"
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = DASH_SLUG
    dash.slices = slices

    db.session.merge(dash)
    db.session.commit()
def load_dashboard(self):
    """Pytest fixture: create one published and one hidden dashboard.

    Both dashboards share the "Energy Sankey" slice from the public
    ``energy_usage`` table. The generated slugs are stashed on the ``pytest``
    module object so tests can look the dashboards up. Cleanup (revoking
    access and deleting both dashboards) runs after the yield.
    """
    with app.app_context():
        table = (
            db.session.query(SqlaTable).filter_by(table_name="energy_usage").one()
        )
        # get a slice from the allowed table
        # (renamed from "slice" — that name shadows the builtin)
        slc = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()

        self.grant_public_access_to_table(table)

        # random suffixes avoid slug collisions across test runs
        pytest.hidden_dash_slug = f"hidden_dash_{random()}"
        pytest.published_dash_slug = f"published_dash_{random()}"

        # Create a published and hidden dashboard and add them to the database
        published_dash = Dashboard()
        published_dash.dashboard_title = "Published Dashboard"
        published_dash.slug = pytest.published_dash_slug
        published_dash.slices = [slc]
        published_dash.published = True

        hidden_dash = Dashboard()
        hidden_dash.dashboard_title = "Hidden Dashboard"
        hidden_dash.slug = pytest.hidden_dash_slug
        hidden_dash.slices = [slc]
        hidden_dash.published = False

        db.session.merge(published_dash)
        db.session.merge(hidden_dash)
        # commit happens at yield time; tests run while the fixture is live
        yield db.session.commit()

        # teardown: revoke access and remove the fixture dashboards
        self.revoke_public_access_to_table(table)
        db.session.delete(published_dash)
        db.session.delete(hidden_dash)
        db.session.commit()
def import_dashboard(  # pylint: disable=too-many-locals,too-many-statements
    dashboard_to_import: Dashboard,
    dataset_id_mapping: Optional[Dict[int, int]] = None,
    import_time: Optional[int] = None,
) -> int:
    """Imports the dashboard from the object to the database.

    Once dashboard is imported, json_metadata field is extended and stores
    remote_id and import_time. It helps to decide if the dashboard has to
    be overridden or just copies over. Slices that belong to this
    dashboard will be wired to existing tables. This function can be used
    to import/export dashboards between multiple superset instances.
    Audit metadata isn't copied over.

    Returns the id of the (new or overridden) dashboard row.
    """

    def alter_positions(
        dashboard: Dashboard, old_to_new_slc_id_dict: Dict[int, int]
    ) -> None:
        """Updates slice_ids in the position json.

        Sample position_json data:
        {
            "DASHBOARD_VERSION_KEY": "v2",
            "DASHBOARD_ROOT_ID": {
                "type": "DASHBOARD_ROOT_TYPE",
                "id": "DASHBOARD_ROOT_ID",
                "children": ["DASHBOARD_GRID_ID"]
            },
            "DASHBOARD_GRID_ID": {
                "type": "DASHBOARD_GRID_TYPE",
                "id": "DASHBOARD_GRID_ID",
                "children": ["DASHBOARD_CHART_TYPE-2"]
            },
            "DASHBOARD_CHART_TYPE-2": {
                "type": "CHART",
                "id": "DASHBOARD_CHART_TYPE-2",
                "children": [],
                "meta": {
                    "width": 4,
                    "height": 50,
                    "chartId": 118
                }
            },
        }
        """
        position_data = json.loads(dashboard.position_json)
        position_json = position_data.values()
        for value in position_json:
            if (
                isinstance(value, dict)
                and value.get("meta")
                and value.get("meta", {}).get("chartId")
            ):
                old_slice_id = value["meta"]["chartId"]
                if old_slice_id in old_to_new_slc_id_dict:
                    value["meta"]["chartId"] = old_to_new_slc_id_dict[old_slice_id]
        dashboard.position_json = json.dumps(position_data)

    def alter_native_filters(dashboard: Dashboard) -> None:
        # rewrite datasetId references in native-filter targets using
        # dataset_id_mapping (closure); no-op when no native filters exist
        json_metadata = json.loads(dashboard.json_metadata)
        native_filter_configuration = json_metadata.get(
            "native_filter_configuration"
        )
        if not native_filter_configuration:
            return
        for native_filter in native_filter_configuration:
            for target in native_filter.get("targets", []):
                old_dataset_id = target.get("datasetId")
                if dataset_id_mapping and old_dataset_id is not None:
                    target["datasetId"] = dataset_id_mapping.get(
                        old_dataset_id, old_dataset_id
                    )
        dashboard.json_metadata = json.dumps(json_metadata)

    logger.info(
        "Started import of the dashboard: %s", dashboard_to_import.to_json()
    )
    session = db.session
    logger.info("Dashboard has %d slices", len(dashboard_to_import.slices))
    # copy slices object as Slice.import_slice will mutate the slice
    # and will remove the existing dashboard - slice association
    slices = copy(dashboard_to_import.slices)

    # Clearing the slug to avoid conflicts
    dashboard_to_import.slug = None

    old_json_metadata = json.loads(dashboard_to_import.json_metadata or "{}")
    old_to_new_slc_id_dict: Dict[int, int] = {}
    new_timed_refresh_immune_slices = []
    new_expanded_slices = {}
    new_filter_scopes = {}
    i_params_dict = dashboard_to_import.params_dict
    # index local slices by the remote_id recorded in their params
    remote_id_slice_map = {
        slc.params_dict["remote_id"]: slc
        for slc in session.query(Slice).all()
        if "remote_id" in slc.params_dict
    }
    for slc in slices:
        logger.info(
            "Importing slice %s from the dashboard: %s",
            slc.to_json(),
            dashboard_to_import.dashboard_title,
        )
        remote_slc = remote_id_slice_map.get(slc.id)
        new_slc_id = import_chart(slc, remote_slc, import_time=import_time)
        old_to_new_slc_id_dict[slc.id] = new_slc_id
        # update json metadata that deals with slice ids
        new_slc_id_str = str(new_slc_id)
        old_slc_id_str = str(slc.id)
        if (
            "timed_refresh_immune_slices" in i_params_dict
            and old_slc_id_str in i_params_dict["timed_refresh_immune_slices"]
        ):
            new_timed_refresh_immune_slices.append(new_slc_id_str)
        if (
            "expanded_slices" in i_params_dict
            and old_slc_id_str in i_params_dict["expanded_slices"]
        ):
            new_expanded_slices[new_slc_id_str] = i_params_dict["expanded_slices"][
                old_slc_id_str
            ]

    # since PR #9109, filter_immune_slices and filter_immune_slice_fields
    # are converted to filter_scopes
    # but dashboard create from import may still have old dashboard filter metadata
    # here we convert them to new filter_scopes metadata first
    filter_scopes = {}
    if (
        "filter_immune_slices" in i_params_dict
        or "filter_immune_slice_fields" in i_params_dict
    ):
        filter_scopes = convert_filter_scopes(old_json_metadata, slices)

    if "filter_scopes" in i_params_dict:
        filter_scopes = old_json_metadata.get("filter_scopes")

    # then replace old slice id to new slice id:
    if filter_scopes:
        new_filter_scopes = copy_filter_scopes(
            old_to_new_slc_id_dict=old_to_new_slc_id_dict,
            old_filter_scopes=filter_scopes,
        )

    # override the dashboard
    existing_dashboard = None
    for dash in session.query(Dashboard).all():
        if (
            "remote_id" in dash.params_dict
            and dash.params_dict["remote_id"] == dashboard_to_import.id
        ):
            existing_dashboard = dash

    dashboard_to_import = dashboard_to_import.copy()
    dashboard_to_import.id = None
    dashboard_to_import.reset_ownership()
    # position_json can be empty for dashboards
    # with charts added from chart-edit page and without re-arranging
    if dashboard_to_import.position_json:
        alter_positions(dashboard_to_import, old_to_new_slc_id_dict)
    dashboard_to_import.alter_params(import_time=import_time)
    dashboard_to_import.remove_params(param_to_remove="filter_immune_slices")
    dashboard_to_import.remove_params(param_to_remove="filter_immune_slice_fields")
    if new_filter_scopes:
        dashboard_to_import.alter_params(filter_scopes=new_filter_scopes)
    if new_expanded_slices:
        dashboard_to_import.alter_params(expanded_slices=new_expanded_slices)
    if new_timed_refresh_immune_slices:
        dashboard_to_import.alter_params(
            timed_refresh_immune_slices=new_timed_refresh_immune_slices
        )
    alter_native_filters(dashboard_to_import)

    new_slices = (
        session.query(Slice)
        .filter(Slice.id.in_(old_to_new_slc_id_dict.values()))
        .all()
    )

    if existing_dashboard:
        existing_dashboard.override(dashboard_to_import)
        existing_dashboard.slices = new_slices
        session.flush()
        return existing_dashboard.id

    dashboard_to_import.slices = new_slices
    session.add(dashboard_to_import)
    session.flush()
    return dashboard_to_import.id  # type: ignore
def set_dash_metadata(  # pylint: disable=too-many-locals
    dashboard: Dashboard,
    data: Dict[Any, Any],
    old_to_new_slice_ids: Optional[Dict[int, int]] = None,
    commit: bool = False,
) -> Dashboard:
    """Apply user-supplied properties (positions, filters, css, title,
    colors, …) onto ``dashboard`` and serialize them into its
    ``position_json`` / ``json_metadata`` columns.

    :param dashboard: the ORM dashboard object to mutate in place
    :param data: request payload; recognized keys include ``positions``,
        ``filter_scopes``, ``default_filters``, ``css``, ``dashboard_title``,
        ``color_namespace``, ``expanded_slices``, ``refresh_frequency``,
        ``color_scheme`` and ``label_colors``
    :param old_to_new_slice_ids: optional remap applied to filter scopes
        (used when slices were re-imported under new ids)
    :param commit: when True, commit the session before returning
    :returns: the mutated dashboard (same object that was passed in)
    """
    positions = data.get("positions")
    new_filter_scopes = {}
    md = dashboard.params_dict

    # Everything position-derived (slice list, uuid stamping, filter scopes,
    # default filters) only runs when the payload actually carries positions.
    if positions is not None:
        # find slices in the position data
        slice_ids = [
            value.get("meta", {}).get("chartId")
            for value in positions.values()
            if isinstance(value, dict)
        ]

        session = db.session()
        current_slices = session.query(Slice).filter(Slice.id.in_(slice_ids)).all()

        dashboard.slices = current_slices

        # add UUID to positions
        # NOTE(review): the comprehension variable `slice` shadows the builtin
        # of the same name within this scope.
        uuid_map = {slice.id: str(slice.uuid) for slice in current_slices}
        for obj in positions.values():
            if (
                isinstance(obj, dict)
                and obj["type"] == "CHART"
                and obj["meta"]["chartId"]
            ):
                chart_id = obj["meta"]["chartId"]
                obj["meta"]["uuid"] = uuid_map.get(chart_id)

        # remove leading and trailing white spaces in the dumped json
        dashboard.position_json = json.dumps(
            positions, indent=None, separators=(",", ":"), sort_keys=True
        )

        if "filter_scopes" in data:
            # replace filter_id and immune ids from old slice id to new slice id:
            # and remove slice ids that are not in dash anymore
            slc_id_dict: Dict[int, int] = {}
            if old_to_new_slice_ids:
                slc_id_dict = {
                    old: new
                    for old, new in old_to_new_slice_ids.items()
                    if new in slice_ids
                }
            else:
                # no remap supplied: identity mapping over the slices present
                slc_id_dict = {sid: sid for sid in slice_ids}
            new_filter_scopes = copy_filter_scopes(
                old_to_new_slc_id_dict=slc_id_dict,
                # filter_scopes may arrive either as a JSON string or a dict
                old_filter_scopes=json.loads(data["filter_scopes"] or "{}")
                if isinstance(data["filter_scopes"], str)
                else data["filter_scopes"],
            )

        # keep only default filters whose slice is still on the dashboard
        default_filters_data = json.loads(data.get("default_filters", "{}"))
        applicable_filters = {
            key: v for key, v in default_filters_data.items() if int(key) in slice_ids
        }
        md["default_filters"] = json.dumps(applicable_filters)

        # positions have its own column, no need to store it in metadata
        md.pop("positions", None)

    # The css and dashboard_title properties are not part of the metadata
    # TODO (geido): remove by refactoring/deprecating save_dash endpoint
    if data.get("css") is not None:
        dashboard.css = data.get("css")
    if data.get("dashboard_title") is not None:
        dashboard.dashboard_title = data.get("dashboard_title")

    if new_filter_scopes:
        md["filter_scopes"] = new_filter_scopes
    else:
        # stale scopes are dropped rather than kept around
        md.pop("filter_scopes", None)
    md.setdefault("timed_refresh_immune_slices", [])
    if data.get("color_namespace") is None:
        md.pop("color_namespace", None)
    else:
        md["color_namespace"] = data.get("color_namespace")
    md["expanded_slices"] = data.get("expanded_slices", {})
    md["refresh_frequency"] = data.get("refresh_frequency", 0)
    md["color_scheme"] = data.get("color_scheme", "")
    md["label_colors"] = data.get("label_colors", {})
    dashboard.json_metadata = json.dumps(md)
    if commit:
        db.session.commit()
    return dashboard
def get_by_id_or_slug(id_or_slug: str) -> Dashboard:
    """Resolve a dashboard from either its numeric id or its slug.

    Raises ``DashboardNotFoundError`` when no dashboard matches, and lets the
    security manager raise if the current user may not access it.
    """
    found = Dashboard.get(id_or_slug)
    if not found:
        raise DashboardNotFoundError()
    # permission check raises on denial; reaching the return means access is ok
    security_manager.raise_for_dashboard_access(found)
    return found
def load_birth_names(only_metadata: bool = False, force: bool = False) -> None:
    """Loading birth name dataset from a zip file in the repo

    :param only_metadata: only (re)create the table reference, slices and
        dashboard; skip loading the physical data
    :param force: reload the physical table even if it already exists
    """
    # pylint: disable=too-many-locals
    tbl_name = "birth_names"
    database = get_example_database()
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        load_data(tbl_name, database)

    obj = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not obj:
        print(f"Creating table [{tbl_name}] reference")
        obj = TBL(table_name=tbl_name)
        db.session.add(obj)
    obj.main_dttm_col = "ds"
    obj.database = database
    obj.filter_select_enabled = True

    # Append the calculated column / metric only once; expressions are
    # compiled against the example DB engine so identifier quoting is correct.
    if not any(col.column_name == "num_california" for col in obj.columns):
        col_state = str(column("state").compile(db.engine))
        col_num = str(column("num").compile(db.engine))
        obj.columns.append(
            TableColumn(
                column_name="num_california",
                expression=f"CASE WHEN {col_state} = 'CA' THEN {col_num} ELSE 0 END",
            )
        )

    if not any(col.metric_name == "sum__num" for col in obj.metrics):
        col = str(column("num").compile(db.engine))
        obj.metrics.append(SqlMetric(metric_name="sum__num", expression=f"SUM({col})"))

    db.session.commit()
    obj.fetch_metadata()
    tbl = obj

    metrics = [
        {
            "expressionType": "SIMPLE",
            "column": {"column_name": "num", "type": "BIGINT"},
            "aggregate": "SUM",
            "label": "Births",
            "optionName": "metric_11",
        }
    ]
    metric = "sum__num"

    defaults = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "limit": "25",
        "granularity_sqla": "ds",
        "groupby": [],
        "row_limit": config["ROW_LIMIT"],
        "since": "100 years ago",
        "until": "now",
        "viz_type": "table",
        "markup_type": "markdown",
    }

    admin = security_manager.find_user("admin")

    print("Creating some slices")
    slices = [
        Slice(
            slice_name="Participants",
            viz_type="big_number",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="big_number",
                granularity_sqla="ds",
                compare_lag="5",
                compare_suffix="over 5Y",
                metric=metric,
            ),
        ),
        Slice(
            slice_name="Genders",
            viz_type="pie",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults, viz_type="pie", groupby=["gender"], metric=metric
            ),
        ),
        Slice(
            slice_name="Trends",
            viz_type="line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="line",
                groupby=["name"],
                granularity_sqla="ds",
                rich_tooltip=True,
                show_legend=True,
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Genders by State",
            viz_type="dist_bar",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                adhoc_filters=[
                    {
                        "clause": "WHERE",
                        "expressionType": "SIMPLE",
                        "filterOptionName": "2745eae5",
                        "comparator": ["other"],
                        "operator": "not in",
                        "subject": "state",
                    }
                ],
                viz_type="dist_bar",
                metrics=[
                    {
                        "expressionType": "SIMPLE",
                        "column": {"column_name": "sum_boys", "type": "BIGINT(20)"},
                        "aggregate": "SUM",
                        "label": "Boys",
                        "optionName": "metric_11",
                    },
                    {
                        "expressionType": "SIMPLE",
                        "column": {"column_name": "sum_girls", "type": "BIGINT(20)"},
                        "aggregate": "SUM",
                        "label": "Girls",
                        "optionName": "metric_12",
                    },
                ],
                groupby=["state"],
            ),
        ),
        Slice(
            slice_name="Girls",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                groupby=["name"],
                adhoc_filters=[gen_filter("gender", "girl")],
                row_limit=50,
                timeseries_limit_metric="sum__num",
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Girl Name Cloud",
            viz_type="word_cloud",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="word_cloud",
                size_from="10",
                series="name",
                size_to="70",
                rotation="square",
                limit="100",
                adhoc_filters=[gen_filter("gender", "girl")],
                metric=metric,
            ),
        ),
        Slice(
            slice_name="Boys",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                groupby=["name"],
                adhoc_filters=[gen_filter("gender", "boy")],
                row_limit=50,
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Boy Name Cloud",
            viz_type="word_cloud",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="word_cloud",
                size_from="10",
                series="name",
                size_to="70",
                rotation="square",
                limit="100",
                adhoc_filters=[gen_filter("gender", "boy")],
                metric=metric,
            ),
        ),
        Slice(
            slice_name="Top 10 Girl Name Share",
            viz_type="area",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                adhoc_filters=[gen_filter("gender", "girl")],
                comparison_type="values",
                groupby=["name"],
                limit=10,
                stacked_style="expand",
                time_grain_sqla="P1D",
                viz_type="area",
                # NOTE: fixed typo — this control was previously misspelled
                # "x_axis_forma" and therefore silently ignored by the chart
                x_axis_format="smart_date",
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Top 10 Boy Name Share",
            viz_type="area",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                adhoc_filters=[gen_filter("gender", "boy")],
                comparison_type="values",
                groupby=["name"],
                limit=10,
                stacked_style="expand",
                time_grain_sqla="P1D",
                viz_type="area",
                # NOTE: fixed typo — see "Top 10 Girl Name Share" above
                x_axis_format="smart_date",
                metrics=metrics,
            ),
        ),
    ]
    # Slices below are created but kept off the dashboard layout; they are
    # registered in misc_dash_slices instead.
    misc_slices = [
        Slice(
            slice_name="Average and Sum Trends",
            viz_type="dual_line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="dual_line",
                metric={
                    "expressionType": "SIMPLE",
                    "column": {"column_name": "num", "type": "BIGINT(20)"},
                    "aggregate": "AVG",
                    "label": "AVG(num)",
                    "optionName": "metric_vgops097wej_g8uff99zhk7",
                },
                metric_2="sum__num",
                granularity_sqla="ds",
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Num Births Trend",
            viz_type="line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(defaults, viz_type="line", metrics=metrics),
        ),
        Slice(
            slice_name="Daily Totals",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            created_by=admin,
            params=get_slice_json(
                defaults,
                groupby=["ds"],
                since="40 years ago",
                until="now",
                viz_type="table",
                metrics=metrics,
            ),
        ),
        Slice(
            slice_name="Number of California Births",
            viz_type="big_number_total",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                metric={
                    "expressionType": "SIMPLE",
                    "column": {
                        "column_name": "num_california",
                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                    },
                    "aggregate": "SUM",
                    "label": "SUM(num_california)",
                },
                viz_type="big_number_total",
                granularity_sqla="ds",
            ),
        ),
        Slice(
            slice_name="Top 10 California Names Timeseries",
            viz_type="line",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                metrics=[
                    {
                        "expressionType": "SIMPLE",
                        "column": {
                            "column_name": "num_california",
                            "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                        },
                        "aggregate": "SUM",
                        "label": "SUM(num_california)",
                    }
                ],
                viz_type="line",
                granularity_sqla="ds",
                groupby=["name"],
                timeseries_limit_metric={
                    "expressionType": "SIMPLE",
                    "column": {
                        "column_name": "num_california",
                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                    },
                    "aggregate": "SUM",
                    "label": "SUM(num_california)",
                },
                limit="10",
            ),
        ),
        Slice(
            slice_name="Names Sorted by Num in California",
            viz_type="table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                metrics=metrics,
                groupby=["name"],
                row_limit=50,
                timeseries_limit_metric={
                    "expressionType": "SIMPLE",
                    "column": {
                        "column_name": "num_california",
                        "expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
                    },
                    "aggregate": "SUM",
                    "label": "SUM(num_california)",
                },
            ),
        ),
        Slice(
            slice_name="Number of Girls",
            viz_type="big_number_total",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                metric=metric,
                viz_type="big_number_total",
                granularity_sqla="ds",
                adhoc_filters=[gen_filter("gender", "girl")],
                subheader="total female participants",
            ),
        ),
        Slice(
            slice_name="Pivot Table",
            viz_type="pivot_table",
            datasource_type="table",
            datasource_id=tbl.id,
            params=get_slice_json(
                defaults,
                viz_type="pivot_table",
                groupby=["name"],
                columns=["state"],
                metrics=metrics,
            ),
        ),
    ]
    for slc in slices:
        merge_slice(slc)

    for slc in misc_slices:
        merge_slice(slc)
        misc_dash_slices.add(slc.slice_name)

    print("Creating a dashboard")
    dash = db.session.query(Dashboard).filter_by(slug="births").first()

    if not dash:
        dash = Dashboard()
        db.session.add(dash)
    dash.published = True
    dash.json_metadata = textwrap.dedent(
        """\
{
    "label_colors": {
        "Girls": "#FF69B4",
        "Boys": "#ADD8E6",
        "girl": "#FF69B4",
        "boy": "#ADD8E6"
    }
}"""
    )
    js = textwrap.dedent(
        # pylint: disable=line-too-long
        """\
{
    "CHART-6GdlekVise": {
        "children": [],
        "id": "CHART-6GdlekVise",
        "meta": {
            "chartId": 5547,
            "height": 50,
            "sliceName": "Top 10 Girl Name Share",
            "width": 5
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-eh0w37bWbR"
        ],
        "type": "CHART"
    },
    "CHART-6n9jxb30JG": {
        "children": [],
        "id": "CHART-6n9jxb30JG",
        "meta": {
            "chartId": 5540,
            "height": 36,
            "sliceName": "Genders by State",
            "width": 5
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW--EyBZQlDi"
        ],
        "type": "CHART"
    },
    "CHART-Jj9qh1ol-N": {
        "children": [],
        "id": "CHART-Jj9qh1ol-N",
        "meta": {
            "chartId": 5545,
            "height": 50,
            "sliceName": "Boy Name Cloud",
            "width": 4
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-kzWtcvo8R1"
        ],
        "type": "CHART"
    },
    "CHART-ODvantb_bF": {
        "children": [],
        "id": "CHART-ODvantb_bF",
        "meta": {
            "chartId": 5548,
            "height": 50,
            "sliceName": "Top 10 Boy Name Share",
            "width": 5
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-kzWtcvo8R1"
        ],
        "type": "CHART"
    },
    "CHART-PAXUUqwmX9": {
        "children": [],
        "id": "CHART-PAXUUqwmX9",
        "meta": {
            "chartId": 5538,
            "height": 34,
            "sliceName": "Genders",
            "width": 3
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-2n0XgiHDgs"
        ],
        "type": "CHART"
    },
    "CHART-_T6n_K9iQN": {
        "children": [],
        "id": "CHART-_T6n_K9iQN",
        "meta": {
            "chartId": 5539,
            "height": 36,
            "sliceName": "Trends",
            "width": 7
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW--EyBZQlDi"
        ],
        "type": "CHART"
    },
    "CHART-eNY0tcE_ic": {
        "children": [],
        "id": "CHART-eNY0tcE_ic",
        "meta": {
            "chartId": 5537,
            "height": 34,
            "sliceName": "Participants",
            "width": 3
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-2n0XgiHDgs"
        ],
        "type": "CHART"
    },
    "CHART-g075mMgyYb": {
        "children": [],
        "id": "CHART-g075mMgyYb",
        "meta": {
            "chartId": 5541,
            "height": 50,
            "sliceName": "Girls",
            "width": 3
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-eh0w37bWbR"
        ],
        "type": "CHART"
    },
    "CHART-n-zGGE6S1y": {
        "children": [],
        "id": "CHART-n-zGGE6S1y",
        "meta": {
            "chartId": 5542,
            "height": 50,
            "sliceName": "Girl Name Cloud",
            "width": 4
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-eh0w37bWbR"
        ],
        "type": "CHART"
    },
    "CHART-vJIPjmcbD3": {
        "children": [],
        "id": "CHART-vJIPjmcbD3",
        "meta": {
            "chartId": 5543,
            "height": 50,
            "sliceName": "Boys",
            "width": 3
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-kzWtcvo8R1"
        ],
        "type": "CHART"
    },
    "DASHBOARD_VERSION_KEY": "v2",
    "GRID_ID": {
        "children": [
            "ROW-2n0XgiHDgs",
            "ROW--EyBZQlDi",
            "ROW-eh0w37bWbR",
            "ROW-kzWtcvo8R1"
        ],
        "id": "GRID_ID",
        "parents": [
            "ROOT_ID"
        ],
        "type": "GRID"
    },
    "HEADER_ID": {
        "id": "HEADER_ID",
        "meta": {
            "text": "Births"
        },
        "type": "HEADER"
    },
    "MARKDOWN-zaflB60tbC": {
        "children": [],
        "id": "MARKDOWN-zaflB60tbC",
        "meta": {
            "code": "<div style=\\"text-align:center\\"> <h1>Birth Names Dashboard</h1> <img src=\\"/static/assets/images/babies.png\\" style=\\"width:50%;\\"></div>",
            "height": 34,
            "width": 6
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID",
            "ROW-2n0XgiHDgs"
        ],
        "type": "MARKDOWN"
    },
    "ROOT_ID": {
        "children": [
            "GRID_ID"
        ],
        "id": "ROOT_ID",
        "type": "ROOT"
    },
    "ROW--EyBZQlDi": {
        "children": [
            "CHART-_T6n_K9iQN",
            "CHART-6n9jxb30JG"
        ],
        "id": "ROW--EyBZQlDi",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID"
        ],
        "type": "ROW"
    },
    "ROW-2n0XgiHDgs": {
        "children": [
            "CHART-eNY0tcE_ic",
            "MARKDOWN-zaflB60tbC",
            "CHART-PAXUUqwmX9"
        ],
        "id": "ROW-2n0XgiHDgs",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID"
        ],
        "type": "ROW"
    },
    "ROW-eh0w37bWbR": {
        "children": [
            "CHART-g075mMgyYb",
            "CHART-n-zGGE6S1y",
            "CHART-6GdlekVise"
        ],
        "id": "ROW-eh0w37bWbR",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID"
        ],
        "type": "ROW"
    },
    "ROW-kzWtcvo8R1": {
        "children": [
            "CHART-vJIPjmcbD3",
            "CHART-Jj9qh1ol-N",
            "CHART-ODvantb_bF"
        ],
        "id": "ROW-kzWtcvo8R1",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
            "ROOT_ID",
            "GRID_ID"
        ],
        "type": "ROW"
    }
}
"""
        # pylint: enable=line-too-long
    )
    pos = json.loads(js)
    # dashboard v2 doesn't allow add markup slice
    dash.slices = [slc for slc in slices if slc.viz_type != "markup"]
    update_slice_ids(pos, dash.slices)
    dash.dashboard_title = "USA Births Names"
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = "births"
    db.session.commit()
def load_deck_dash() -> None:
    """Create the deck.gl example slices (scatter, screen grid, hex, grid,
    polygon, arc, path) and assemble them into the "deck.gl Demo" dashboard.

    Assumes the example tables (long_lat, sf_population_polygons, flights,
    bart_lines) have already been loaded.
    """
    print("Loading deck.gl dashboard")
    slices = []
    # all point-based slices below render data from the long_lat example table
    tbl = db.session.query(TBL).filter_by(table_name="long_lat").first()
    slice_data = {
        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
        "color_picker": COLOR_RED,
        "datasource": "5__table",
        "granularity_sqla": None,
        "groupby": [],
        "mapbox_style": "mapbox://styles/mapbox/light-v9",
        "multiplier": 10,
        "point_radius_fixed": {"type": "metric", "value": "count"},
        "point_unit": "square_m",
        "min_radius": 1,
        "max_radius": 250,
        "row_limit": 5000,
        "time_range": " : ",
        "size": "count",
        "time_grain_sqla": None,
        "viewport": {
            "bearing": -4.952916738791771,
            "latitude": 37.78926922909199,
            "longitude": -122.42613341901688,
            "pitch": 4.750411100577438,
            "zoom": 12.729132798697304,
        },
        "viz_type": "deck_scatter",
    }

    print("Creating Scatterplot slice")
    slc = Slice(
        slice_name="Scatterplot",
        viz_type="deck_scatter",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    slice_data = {
        "point_unit": "square_m",
        "row_limit": 5000,
        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
        "mapbox_style": "mapbox://styles/mapbox/dark-v9",
        "granularity_sqla": None,
        "size": "count",
        "viz_type": "deck_screengrid",
        "time_range": "No filter",
        "point_radius": "Auto",
        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
        "grid_size": 20,
        "viewport": {
            "zoom": 14.161641703941438,
            "longitude": -122.41827069521386,
            "bearing": -4.952916738791771,
            "latitude": 37.76024135844065,
            "pitch": 4.750411100577438,
        },
        "point_radius_fixed": {"type": "fix", "value": 2000},
        "datasource": "5__table",
        "time_grain_sqla": None,
        "groupby": [],
    }

    print("Creating Screen Grid slice")
    slc = Slice(
        slice_name="Screen grid",
        viz_type="deck_screengrid",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    slice_data = {
        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
        "row_limit": 5000,
        "mapbox_style": "mapbox://styles/mapbox/streets-v9",
        "granularity_sqla": None,
        "size": "count",
        "viz_type": "deck_hex",
        "time_range": "No filter",
        "point_radius_unit": "Pixels",
        "point_radius": "Auto",
        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
        "grid_size": 40,
        "extruded": True,
        "viewport": {
            "latitude": 37.789795085160335,
            "pitch": 54.08961642447763,
            "zoom": 13.835465702403654,
            "longitude": -122.40632230075536,
            "bearing": -2.3984797349335167,
        },
        "point_radius_fixed": {"type": "fix", "value": 2000},
        "datasource": "5__table",
        "time_grain_sqla": None,
        "groupby": [],
    }

    print("Creating Hex slice")
    slc = Slice(
        slice_name="Hexagons",
        viz_type="deck_hex",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    slice_data = {
        "spatial": {"type": "latlong", "lonCol": "LON", "latCol": "LAT"},
        "row_limit": 5000,
        "mapbox_style": "mapbox://styles/mapbox/satellite-streets-v9",
        "granularity_sqla": None,
        "size": "count",
        "viz_type": "deck_grid",
        "point_radius_unit": "Pixels",
        "point_radius": "Auto",
        "time_range": "No filter",
        "color_picker": {"a": 1, "r": 14, "b": 0, "g": 255},
        "grid_size": 120,
        "extruded": True,
        "viewport": {
            "longitude": -122.42066918995666,
            "bearing": 155.80099696026355,
            "zoom": 12.699690845482069,
            "latitude": 37.7942314882596,
            "pitch": 53.470800300695146,
        },
        "point_radius_fixed": {"type": "fix", "value": 2000},
        "datasource": "5__table",
        "time_grain_sqla": None,
        "groupby": [],
    }

    print("Creating Grid slice")
    slc = Slice(
        slice_name="Grid",
        viz_type="deck_grid",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    # the polygon slice reads from a different example table
    polygon_tbl = (
        db.session.query(TBL).filter_by(table_name="sf_population_polygons").first()
    )
    slice_data = {
        "datasource": "11__table",
        "viz_type": "deck_polygon",
        "slice_id": 41,
        "granularity_sqla": None,
        "time_grain_sqla": None,
        "time_range": " : ",
        "line_column": "contour",
        "metric": {
            "aggregate": "SUM",
            "column": {
                "column_name": "population",
                "description": None,
                "expression": None,
                "filterable": True,
                "groupby": True,
                "id": 1332,
                "is_dttm": False,
                "optionName": "_col_population",
                "python_date_format": None,
                "type": "BIGINT",
                "verbose_name": None,
            },
            "expressionType": "SIMPLE",
            "hasCustomLabel": True,
            "label": "Population",
            "optionName": "metric_t2v4qbfiz1_w6qgpx4h2p",
            "sqlExpression": None,
        },
        "line_type": "json",
        "linear_color_scheme": "oranges",
        "mapbox_style": "mapbox://styles/mapbox/light-v9",
        "viewport": {
            "longitude": -122.43388541747726,
            "latitude": 37.752020331384834,
            "zoom": 11.133995608594631,
            "bearing": 37.89506450385642,
            "pitch": 60,
            "width": 667,
            "height": 906,
            "altitude": 1.5,
            "maxZoom": 20,
            "minZoom": 0,
            "maxPitch": 60,
            "minPitch": 0,
            "maxLatitude": 85.05113,
            "minLatitude": -85.05113,
        },
        "reverse_long_lat": False,
        "fill_color_picker": {"r": 3, "g": 65, "b": 73, "a": 1},
        "stroke_color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
        "filled": True,
        "stroked": False,
        "extruded": True,
        "multiplier": 0.1,
        "point_radius_fixed": {
            "type": "metric",
            "value": {
                "aggregate": None,
                "column": None,
                "expressionType": "SQL",
                "hasCustomLabel": None,
                "label": "Density",
                "optionName": "metric_c5rvwrzoo86_293h6yrv2ic",
                "sqlExpression": "SUM(population)/SUM(area)",
            },
        },
        "js_columns": [],
        "js_data_mutator": "",
        "js_tooltip": "",
        "js_onclick_href": "",
        "legend_format": ".1s",
        "legend_position": "tr",
    }

    print("Creating Polygon slice")
    slc = Slice(
        slice_name="Polygons",
        viz_type="deck_polygon",
        datasource_type="table",
        datasource_id=polygon_tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    slice_data = {
        "datasource": "10__table",
        "viz_type": "deck_arc",
        "slice_id": 42,
        "granularity_sqla": None,
        "time_grain_sqla": None,
        "time_range": " : ",
        "start_spatial": {
            "type": "latlong",
            "latCol": "LATITUDE",
            "lonCol": "LONGITUDE",
        },
        "end_spatial": {
            "type": "latlong",
            "latCol": "LATITUDE_DEST",
            "lonCol": "LONGITUDE_DEST",
        },
        "row_limit": 5000,
        "mapbox_style": "mapbox://styles/mapbox/light-v9",
        "viewport": {
            "altitude": 1.5,
            "bearing": 8.546256357301871,
            "height": 642,
            "latitude": 44.596651438714254,
            "longitude": -91.84340711201104,
            "maxLatitude": 85.05113,
            "maxPitch": 60,
            "maxZoom": 20,
            "minLatitude": -85.05113,
            "minPitch": 0,
            "minZoom": 0,
            "pitch": 60,
            "width": 997,
            "zoom": 2.929837070560775,
        },
        "color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
        "stroke_width": 1,
    }

    print("Creating Arc slice")
    slc = Slice(
        slice_name="Arcs",
        viz_type="deck_arc",
        datasource_type="table",
        # arcs are drawn from the flights example table
        datasource_id=db.session.query(TBL).filter_by(table_name="flights").first().id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)

    slice_data = {
        "datasource": "12__table",
        "slice_id": 43,
        "viz_type": "deck_path",
        "time_grain_sqla": None,
        "time_range": " : ",
        "line_column": "path_json",
        "line_type": "json",
        "row_limit": 5000,
        "mapbox_style": "mapbox://styles/mapbox/light-v9",
        "viewport": {
            "longitude": -122.18885402582598,
            "latitude": 37.73671752604488,
            "zoom": 9.51847667620428,
            "bearing": 0,
            "pitch": 0,
            "width": 669,
            "height": 1094,
            "altitude": 1.5,
            "maxZoom": 20,
            "minZoom": 0,
            "maxPitch": 60,
            "minPitch": 0,
            "maxLatitude": 85.05113,
            "minLatitude": -85.05113,
        },
        "color_picker": {"r": 0, "g": 122, "b": 135, "a": 1},
        "line_width": 150,
        "reverse_long_lat": False,
        "js_columns": ["color"],
        # client-side JS snippet evaluated by deck.gl to color each path
        "js_data_mutator": "data => data.map(d => ({\n"
        "    ...d,\n"
        "    color: colors.hexToRGB(d.extraProps.color)\n"
        "}));",
        "js_tooltip": "",
        "js_onclick_href": "",
    }

    print("Creating Path slice")
    slc = Slice(
        slice_name="Path",
        viz_type="deck_path",
        datasource_type="table",
        datasource_id=db.session.query(TBL)
        .filter_by(table_name="bart_lines")
        .first()
        .id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)
    slices.append(slc)
    slug = "deck"

    print("Creating a dashboard")
    title = "deck.gl Demo"
    dash = db.session.query(Dashboard).filter_by(slug=slug).first()

    if not dash:
        dash = Dashboard()
    dash.published = True
    js = POSITION_JSON
    pos = json.loads(js)
    # remap chartId placeholders in the layout to the real slice ids
    update_slice_ids(pos, slices)
    dash.position_json = json.dumps(pos, indent=4)
    dash.dashboard_title = title
    dash.slug = slug
    dash.slices = slices
    db.session.merge(dash)
    db.session.commit()
def _export(model: Dashboard, export_related: bool = True) -> Iterator[Tuple[str, str]]:
    """Yield ``(file_name, file_content)`` pairs for exporting ``model``.

    Emits the dashboard itself as a YAML file and, when ``export_related``
    is True, also yields the files for any datasets referenced by native
    filters and for all of the dashboard's charts.
    """
    dashboard_slug = secure_filename(model.dashboard_title)
    file_name = f"dashboards/{dashboard_slug}_{model.id}.yaml"

    payload = model.export_to_dict(
        recursive=False,
        include_parent_ref=False,
        include_defaults=True,
        export_uuids=True,
    )
    # TODO (betodealmeida): move this logic to export_to_dict once this
    # becomes the default export endpoint
    # JSON-encoded columns are decoded into real objects for the YAML output;
    # undecodable values are logged and replaced with an empty dict.
    for key, new_name in JSON_KEYS.items():
        value: Optional[str] = payload.pop(key, None)
        if value:
            try:
                payload[new_name] = json.loads(value)
            except (TypeError, json.decoder.JSONDecodeError):
                logger.info("Unable to decode `%s` field: %s", key, value)
                payload[new_name] = {}

    # Extract all native filter datasets and replace native
    # filter dataset references with uuid
    for native_filter in payload.get("metadata", {}).get(
        "native_filter_configuration", []
    ):
        for target in native_filter.get("targets", []):
            dataset_id = target.pop("datasetId", None)
            if dataset_id is not None:
                dataset = DatasetDAO.find_by_id(dataset_id)
                if dataset:
                    target["datasetUuid"] = str(dataset.uuid)
                    if export_related:
                        # NOTE(review): a dataset referenced by several
                        # filters is exported once per reference
                        yield from ExportDatasetsCommand([dataset_id]).run()

    # the mapping between dashboard -> charts is inferred from the position
    # attribute, so if it's not present we need to add a default config
    if not payload.get("position"):
        payload["position"] = get_default_position(model.dashboard_title)

    # if any charts or not referenced in position, we need to add them
    # in a new row
    referenced_charts = find_chart_uuids(payload["position"])
    orphan_charts = {
        chart for chart in model.slices if str(chart.uuid) not in referenced_charts
    }

    if orphan_charts:
        payload["position"] = append_charts(payload["position"], orphan_charts)

    payload["version"] = EXPORT_VERSION

    file_content = yaml.safe_dump(payload, sort_keys=False)
    yield file_name, file_content

    if export_related:
        chart_ids = [chart.id for chart in model.slices]
        yield from ExportChartsCommand(chart_ids).run()
def test_users_can_view_published_dashboard(self):
    """Only published dashboards show up in the dashboard listing."""
    table = db.session.query(SqlaTable).filter_by(table_name="energy_usage").one()
    # get a slice from the allowed table
    # (renamed from `slice`, which shadowed the `slice` builtin)
    slc = db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()

    self.grant_public_access_to_table(table)

    hidden_dash_slug = f"hidden_dash_{random()}"
    published_dash_slug = f"published_dash_{random()}"

    # Create a published and hidden dashboard and add them to the database
    published_dash = Dashboard()
    published_dash.dashboard_title = "Published Dashboard"
    published_dash.slug = published_dash_slug
    published_dash.slices = [slc]
    published_dash.published = True

    hidden_dash = Dashboard()
    hidden_dash.dashboard_title = "Hidden Dashboard"
    hidden_dash.slug = hidden_dash_slug
    hidden_dash.slices = [slc]
    hidden_dash.published = False

    db.session.merge(published_dash)
    db.session.merge(hidden_dash)
    db.session.commit()

    resp = self.get_resp("/api/v1/dashboard/")
    self.assertNotIn(f"/superset/dashboard/{hidden_dash_slug}/", resp)
    self.assertIn(f"/superset/dashboard/{published_dash_slug}/", resp)
def test_import_empty_dashboard(self):
    """A dashboard with no slices survives an import round-trip."""
    source_dash = self.create_dashboard("empty_dashboard", id=10001)
    new_dash_id = Dashboard.import_obj(source_dash, import_time=1989)
    imported = self.get_dash(new_dash_id)
    self.assert_dash_equals(source_dash, imported, check_position=False)
def test_users_can_view_own_dashboard(self):
    """The dashboard listing only includes dashboards the user owns."""
    user = security_manager.find_user("gamma")
    owned_slug = f"my_dash_{random()}"
    foreign_slug = f"not_my_dash_{random()}"

    # Create one dashboard I own and another that I don't
    owned_dash = Dashboard()
    owned_dash.dashboard_title = "My Dashboard"
    owned_dash.slug = owned_slug
    owned_dash.owners = [user]
    owned_dash.slices = []

    foreign_dash = Dashboard()
    foreign_dash.dashboard_title = "Not My Dashboard"
    foreign_dash.slug = foreign_slug
    foreign_dash.slices = []
    foreign_dash.owners = []

    db.session.merge(owned_dash)
    db.session.merge(foreign_dash)
    db.session.commit()

    self.login(user.username)
    resp = self.get_resp("/api/v1/dashboard/")

    self.assertIn(f"/superset/dashboard/{owned_slug}/", resp)
    self.assertNotIn(f"/superset/dashboard/{foreign_slug}/", resp)
def set_dash_metadata(  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    dashboard: Dashboard,
    data: Dict[Any, Any],
    old_to_new_slice_ids: Optional[Dict[int, int]] = None,
) -> None:
    """Mutate ``dashboard`` in place from a save-dash payload: sync its slice
    list and slice names to the position data, then rebuild ``position_json``
    and ``json_metadata``.

    :param dashboard: ORM dashboard to update (caller is responsible for
        committing the session)
    :param data: payload; ``positions`` and ``dashboard_title`` are required
        keys, the rest are optional
    :param old_to_new_slice_ids: optional slice-id remap applied to filter
        scopes (used after import re-assigns ids)
    """
    positions = data["positions"]
    # find slices in the position data
    slice_ids = []
    slice_id_to_name = {}
    for value in positions.values():
        if isinstance(value, dict):
            # position entries without chart metadata are skipped silently
            try:
                slice_id = value["meta"]["chartId"]
                slice_ids.append(slice_id)
                slice_id_to_name[slice_id] = value["meta"]["sliceName"]
            except KeyError:
                pass

    session = db.session()
    current_slices = session.query(Slice).filter(Slice.id.in_(slice_ids)).all()

    dashboard.slices = current_slices

    # update slice names. this assumes user has permissions to update the slice
    # we allow user set slice name be empty string
    for slc in dashboard.slices:
        try:
            new_name = slice_id_to_name[slc.id]
            if slc.slice_name != new_name:
                slc.slice_name = new_name
                session.merge(slc)
                session.flush()
        except KeyError:
            pass

    # remove leading and trailing white spaces in the dumped json
    dashboard.position_json = json.dumps(
        positions, indent=None, separators=(",", ":"), sort_keys=True
    )

    md = dashboard.params_dict
    dashboard.css = data.get("css")
    dashboard.dashboard_title = data["dashboard_title"]

    if "timed_refresh_immune_slices" not in md:
        md["timed_refresh_immune_slices"] = []
    new_filter_scopes = {}
    if "filter_scopes" in data:
        # replace filter_id and immune ids from old slice id to new slice id:
        # and remove slice ids that are not in dash anymore
        slc_id_dict: Dict[int, int] = {}
        if old_to_new_slice_ids:
            slc_id_dict = {
                old: new
                for old, new in old_to_new_slice_ids.items()
                if new in slice_ids
            }
        else:
            # identity mapping when no remap was supplied
            slc_id_dict = {sid: sid for sid in slice_ids}
        new_filter_scopes = copy_filter_scopes(
            old_to_new_slc_id_dict=slc_id_dict,
            old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
        )
    if new_filter_scopes:
        md["filter_scopes"] = new_filter_scopes
    else:
        md.pop("filter_scopes", None)
    md["expanded_slices"] = data.get("expanded_slices", {})
    md["refresh_frequency"] = data.get("refresh_frequency", 0)
    # keep only default filters whose slice is still on the dashboard
    default_filters_data = json.loads(data.get("default_filters", "{}"))
    applicable_filters = {
        key: v for key, v in default_filters_data.items() if int(key) in slice_ids
    }
    md["default_filters"] = json.dumps(applicable_filters)
    md["color_scheme"] = data.get("color_scheme")
    if data.get("color_namespace"):
        md["color_namespace"] = data.get("color_namespace")
    if data.get("label_colors"):
        md["label_colors"] = data.get("label_colors")
    dashboard.json_metadata = json.dumps(md)
def load_unicode_test_data(only_metadata: bool = False, force: bool = False) -> None:
    """Loading unicode test dataset from a csv file in the repo

    Loads the CSV into the example database (unless ``only_metadata`` is set
    and the table already exists), then creates/updates the table reference,
    a word-cloud slice, and a single-chart dashboard (slug ``unicode-test``).

    :param only_metadata: when True, skip (re)loading the table data
    :param force: when True, reload the table data even if the table exists
    """
    tbl_name = "unicode_test"
    database = utils.get_example_database()
    table_exists = database.has_table_by_name(tbl_name)

    if not only_metadata and (not table_exists or force):
        data = get_example_data(
            "unicode_utf8_unixnl_test.csv", is_gzip=False, make_bytes=True
        )
        df = pd.read_csv(data, encoding="utf-8")
        # generate date/numeric data
        df["dttm"] = datetime.datetime.now().date()
        df["value"] = [random.randint(1, 100) for _ in range(len(df))]
        df.to_sql(  # pylint: disable=no-member
            tbl_name,
            database.get_sqla_engine(),
            if_exists="replace",
            chunksize=500,
            dtype={
                "phrase": String(500),
                "short_phrase": String(10),
                "with_missing": String(100),
                "dttm": Date(),
                "value": Float(),
            },
            index=False,
        )
        print("Done loading table!")
        print("-" * 80)

    print("Creating table [unicode_test] reference")
    obj = db.session.query(TBL).filter_by(table_name=tbl_name).first()
    if not obj:
        obj = TBL(table_name=tbl_name)
    obj.main_dttm_col = "dttm"
    obj.database = database
    db.session.merge(obj)
    db.session.commit()
    # refresh column/metric metadata from the physical table
    obj.fetch_metadata()
    tbl = obj

    # form-data for the word-cloud slice created below
    slice_data = {
        "granularity_sqla": "dttm",
        "groupby": [],
        "metric": {
            "aggregate": "SUM",
            "column": {"column_name": "value"},
            "expressionType": "SIMPLE",
            "label": "Value",
        },
        "row_limit": config["ROW_LIMIT"],
        "since": "100 years ago",
        "until": "now",
        "viz_type": "word_cloud",
        "size_from": "10",
        "series": "short_phrase",
        "size_to": "70",
        "rotation": "square",
        "limit": "100",
    }

    print("Creating a slice")
    slc = Slice(
        slice_name="Unicode Cloud",
        viz_type="word_cloud",
        datasource_type="table",
        datasource_id=tbl.id,
        params=get_slice_json(slice_data),
    )
    merge_slice(slc)

    print("Creating a dashboard")
    dash = db.session.query(Dashboard).filter_by(slug="unicode-test").first()

    if not dash:
        dash = Dashboard()
    # dashboard layout: a single row holding the unicode word-cloud chart;
    # the placeholder chartId is rewritten by update_slice_ids() below
    js = """\
{
    "CHART-Hkx6154FEm": {
        "children": [],
        "id": "CHART-Hkx6154FEm",
        "meta": {
            "chartId": 2225,
            "height": 30,
            "sliceName": "slice 1",
            "width": 4
        },
        "type": "CHART"
    },
    "GRID_ID": {
        "children": [
            "ROW-SyT19EFEQ"
        ],
        "id": "GRID_ID",
        "type": "GRID"
    },
    "ROOT_ID": {
        "children": [
            "GRID_ID"
        ],
        "id": "ROOT_ID",
        "type": "ROOT"
    },
    "ROW-SyT19EFEQ": {
        "children": [
            "CHART-Hkx6154FEm"
        ],
        "id": "ROW-SyT19EFEQ",
        "meta": {
            "background": "BACKGROUND_TRANSPARENT"
        },
        "type": "ROW"
    },
    "DASHBOARD_VERSION_KEY": "v2"
}
"""
    dash.dashboard_title = "Unicode Test"
    pos = json.loads(js)
    # point the layout's chart node at the freshly merged slice id
    update_slice_ids(pos, [slc])
    dash.position_json = json.dumps(pos, indent=4)
    dash.slug = "unicode-test"
    dash.slices = [slc]
    db.session.merge(dash)
    db.session.commit()
def load_tabbed_dashboard(_: bool = False) -> None:
    """Creating a tabbed dashboard

    Builds (or updates) the ``tabbed_dash`` dashboard from a hard-coded
    position JSON containing nested TABS/TAB/ROW/COLUMN/CHART nodes; the
    chart ids in the layout are remapped to real slices by
    ``update_slice_ids``.

    :param _: unused; kept so the function matches the common example-loader
        signature (callers pass a boolean flag positionally)
    """
    print("Creating a dashboard with nested tabs")
    slug = "tabbed_dash"
    dash = db.session.query(Dashboard).filter_by(slug=slug).first()

    if not dash:
        dash = Dashboard()

    # Hard-coded layout tree. Top level: TABS-lV0r00f4H1 with "Tab A"
    # (TAB-NF3dlrWGS) and "Tab B" (TAB-gcQJxApOZS); Tab A contains further
    # nested tab sets (TABS-CslNeIC6x8, TABS-CSjo6VfNrj, TABS-urzRuDRusW).
    js = textwrap.dedent(
        """
    {
      "CHART-06Kg-rUggO": {
        "children": [],
        "id": "CHART-06Kg-rUggO",
        "meta": {
          "chartId": 617,
          "height": 42,
          "sliceName": "Number of Girls",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8",
          "TAB-SDz1jDqYZ2",
          "ROW-DnYkJgKQE"
        ],
        "type": "CHART"
      },
      "CHART-E4rQMdzY9-": {
        "children": [],
        "id": "CHART-E4rQMdzY9-",
        "meta": {
          "chartId": 616,
          "height": 41,
          "sliceName": "Names Sorted by Num in California",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8",
          "TAB-SDz1jDqYZ2",
          "ROW-DnYkJgKQE"
        ],
        "type": "CHART"
      },
      "CHART-WO52N6b5de": {
        "children": [],
        "id": "CHART-WO52N6b5de",
        "meta": {
          "chartId": 615,
          "height": 41,
          "sliceName": "Top 10 California Names Timeseries",
          "width": 8
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8",
          "TAB-t54frVKlx",
          "ROW-ghqEVzr2fA"
        ],
        "type": "CHART"
      },
      "CHART-c0EjR-OZ0n": {
        "children": [],
        "id": "CHART-c0EjR-OZ0n",
        "meta": {
          "chartId": 598,
          "height": 50,
          "sliceName": "Treemap",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-RGd6kjW57J"
        ],
        "type": "CHART"
      },
      "CHART-dxV7Il74hH": {
        "children": [],
        "id": "CHART-dxV7Il74hH",
        "meta": {
          "chartId": 597,
          "height": 50,
          "sliceName": "Box plot",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-gcQJxApOZS",
          "TABS-afnrUvdxYF",
          "TAB-jNNd4WWar1",
          "ROW-7ygtDczaQ"
        ],
        "type": "CHART"
      },
      "CHART-jJ5Yj1Ptaz": {
        "children": [],
        "id": "CHART-jJ5Yj1Ptaz",
        "meta": {
          "chartId": 592,
          "height": 29,
          "sliceName": "Growth Rate",
          "width": 5
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn"
        ],
        "type": "CHART"
      },
      "CHART-z4gmEuCqQ5": {
        "children": [],
        "id": "CHART-z4gmEuCqQ5",
        "meta": {
          "chartId": 589,
          "height": 50,
          "sliceName": "Region Filter",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-EcNm_wh922",
          "ROW-LCjsdSetJ"
        ],
        "type": "CHART"
      },
      "COLUMN-RGd6kjW57J": {
        "children": ["CHART-c0EjR-OZ0n"],
        "id": "COLUMN-RGd6kjW57J",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT",
          "width": 4
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N"
        ],
        "type": "COLUMN"
      },
      "COLUMN-V6vsdWdOEJ": {
        "children": ["TABS-urzRuDRusW"],
        "id": "COLUMN-V6vsdWdOEJ",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT",
          "width": 7
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn"
        ],
        "type": "COLUMN"
      },
      "COLUMN-_o23occSTg": {
        "children": ["TABS-CslNeIC6x8"],
        "id": "COLUMN-_o23occSTg",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT",
          "width": 8
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N"
        ],
        "type": "COLUMN"
      },
      "DASHBOARD_VERSION_KEY": "v2",
      "GRID_ID": {
        "children": [],
        "id": "GRID_ID",
        "type": "GRID"
      },
      "HEADER_ID": {
        "id": "HEADER_ID",
        "type": "HEADER",
        "meta": {
          "text": "Tabbed Dashboard"
        }
      },
      "ROOT_ID": {
        "children": ["TABS-lV0r00f4H1"],
        "id": "ROOT_ID",
        "type": "ROOT"
      },
      "ROW-7ygtDczaQ": {
        "children": ["CHART-dxV7Il74hH"],
        "id": "ROW-7ygtDczaQ",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-gcQJxApOZS",
          "TABS-afnrUvdxYF",
          "TAB-jNNd4WWar1"
        ],
        "type": "ROW"
      },
      "ROW-DnYkJgKQE": {
        "children": ["CHART-06Kg-rUggO", "CHART-E4rQMdzY9-"],
        "id": "ROW-DnYkJgKQE",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8",
          "TAB-SDz1jDqYZ2"
        ],
        "type": "ROW"
      },
      "ROW-G73z9PIHn": {
        "children": ["CHART-jJ5Yj1Ptaz", "COLUMN-V6vsdWdOEJ"],
        "id": "ROW-G73z9PIHn",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7"
        ],
        "type": "ROW"
      },
      "ROW-LCjsdSetJ": {
        "children": ["CHART-z4gmEuCqQ5"],
        "id": "ROW-LCjsdSetJ",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-EcNm_wh922"
        ],
        "type": "ROW"
      },
      "ROW-ghqEVzr2fA": {
        "children": ["CHART-WO52N6b5de"],
        "id": "ROW-ghqEVzr2fA",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8",
          "TAB-t54frVKlx"
        ],
        "type": "ROW"
      },
      "ROW-kHj58UJg5N": {
        "children": ["COLUMN-RGd6kjW57J", "COLUMN-_o23occSTg"],
        "id": "ROW-kHj58UJg5N",
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        },
        "parents": ["ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS"],
        "type": "ROW"
      },
      "TAB-0yhA2SgdPg": {
        "children": ["ROW-Gr9YPyQGwf"],
        "id": "TAB-0yhA2SgdPg",
        "meta": {
          "defaultText": "Tab title",
          "placeholder": "Tab title",
          "text": "Level 2 nested tab 1"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn",
          "COLUMN-V6vsdWdOEJ",
          "TABS-urzRuDRusW"
        ],
        "type": "TAB"
      },
      "TAB-3a1Gvm-Ef": {
        "children": [],
        "id": "TAB-3a1Gvm-Ef",
        "meta": {
          "defaultText": "Tab title",
          "placeholder": "Tab title",
          "text": "Level 2 nested tab 2"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn",
          "COLUMN-V6vsdWdOEJ",
          "TABS-urzRuDRusW"
        ],
        "type": "TAB"
      },
      "TAB-EcNm_wh922": {
        "children": ["ROW-LCjsdSetJ"],
        "id": "TAB-EcNm_wh922",
        "meta": {
          "text": "row tab 1"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj"
        ],
        "type": "TAB"
      },
      "TAB-NF3dlrWGS": {
        "children": ["ROW-kHj58UJg5N", "TABS-CSjo6VfNrj"],
        "id": "TAB-NF3dlrWGS",
        "meta": {
          "text": "Tab A"
        },
        "parents": ["ROOT_ID", "TABS-lV0r00f4H1"],
        "type": "TAB"
      },
      "TAB-SDz1jDqYZ2": {
        "children": ["ROW-DnYkJgKQE"],
        "id": "TAB-SDz1jDqYZ2",
        "meta": {
          "defaultText": "Tab title",
          "placeholder": "Tab title",
          "text": "Nested tab 1"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8"
        ],
        "type": "TAB"
      },
      "TAB-gcQJxApOZS": {
        "children": ["TABS-afnrUvdxYF"],
        "id": "TAB-gcQJxApOZS",
        "meta": {
          "text": "Tab B"
        },
        "parents": ["ROOT_ID", "TABS-lV0r00f4H1"],
        "type": "TAB"
      },
      "TAB-jNNd4WWar1": {
        "children": ["ROW-7ygtDczaQ"],
        "id": "TAB-jNNd4WWar1",
        "meta": {
          "text": "New Tab"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-gcQJxApOZS",
          "TABS-afnrUvdxYF"
        ],
        "type": "TAB"
      },
      "TAB-t54frVKlx": {
        "children": ["ROW-ghqEVzr2fA"],
        "id": "TAB-t54frVKlx",
        "meta": {
          "defaultText": "Tab title",
          "placeholder": "Tab title",
          "text": "Nested tab 2"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg",
          "TABS-CslNeIC6x8"
        ],
        "type": "TAB"
      },
      "TAB-z81Q87PD7": {
        "children": ["ROW-G73z9PIHn"],
        "id": "TAB-z81Q87PD7",
        "meta": {
          "text": "row tab 2"
        },
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj"
        ],
        "type": "TAB"
      },
      "TABS-CSjo6VfNrj": {
        "children": ["TAB-EcNm_wh922", "TAB-z81Q87PD7"],
        "id": "TABS-CSjo6VfNrj",
        "meta": {},
        "parents": ["ROOT_ID", "TABS-lV0r00f4H1", "TAB-NF3dlrWGS"],
        "type": "TABS"
      },
      "TABS-CslNeIC6x8": {
        "children": ["TAB-SDz1jDqYZ2", "TAB-t54frVKlx"],
        "id": "TABS-CslNeIC6x8",
        "meta": {},
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "ROW-kHj58UJg5N",
          "COLUMN-_o23occSTg"
        ],
        "type": "TABS"
      },
      "TABS-afnrUvdxYF": {
        "children": ["TAB-jNNd4WWar1"],
        "id": "TABS-afnrUvdxYF",
        "meta": {},
        "parents": ["ROOT_ID", "TABS-lV0r00f4H1", "TAB-gcQJxApOZS"],
        "type": "TABS"
      },
      "TABS-lV0r00f4H1": {
        "children": ["TAB-NF3dlrWGS", "TAB-gcQJxApOZS"],
        "id": "TABS-lV0r00f4H1",
        "meta": {},
        "parents": ["ROOT_ID"],
        "type": "TABS"
      },
      "TABS-urzRuDRusW": {
        "children": ["TAB-0yhA2SgdPg", "TAB-3a1Gvm-Ef"],
        "id": "TABS-urzRuDRusW",
        "meta": {},
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn",
          "COLUMN-V6vsdWdOEJ"
        ],
        "type": "TABS"
      },
      "CHART-p4_VUp8w3w": {
        "type": "CHART",
        "id": "CHART-p4_VUp8w3w",
        "children": [],
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn",
          "COLUMN-V6vsdWdOEJ",
          "TABS-urzRuDRusW",
          "TAB-0yhA2SgdPg",
          "ROW-Gr9YPyQGwf"
        ],
        "meta": {
          "width": 4,
          "height": 20,
          "chartId": 614,
          "sliceName": "Number of California Births"
        }
      },
      "ROW-Gr9YPyQGwf": {
        "type": "ROW",
        "id": "ROW-Gr9YPyQGwf",
        "children": ["CHART-p4_VUp8w3w"],
        "parents": [
          "ROOT_ID",
          "TABS-lV0r00f4H1",
          "TAB-NF3dlrWGS",
          "TABS-CSjo6VfNrj",
          "TAB-z81Q87PD7",
          "ROW-G73z9PIHn",
          "COLUMN-V6vsdWdOEJ",
          "TABS-urzRuDRusW",
          "TAB-0yhA2SgdPg"
        ],
        "meta": {
          "background": "BACKGROUND_TRANSPARENT"
        }
      }
    }"""
    )
    pos = json.loads(js)
    # remap the layout's hard-coded chartIds to the ids of real slices and
    # collect those Slice objects for the dashboard
    slices = update_slice_ids(pos)
    dash.position_json = json.dumps(pos, indent=4)
    dash.slices = slices
    dash.dashboard_title = "Tabbed Dashboard"
    dash.slug = slug
    db.session.merge(dash)
    db.session.commit()