def refresh_druid(datasource: str, merge: bool) -> None:
    """Refresh druid datasources.

    Iterates over every registered ``DruidCluster`` and asks it to re-scan
    its datasources. A failure on one cluster is logged and does not stop
    the remaining clusters from being processed.

    :param datasource: forwarded to each cluster as ``datasource_name``
    :param merge: forwarded to each cluster as ``merge_flag``
    """
    session = db.session()
    # Imported lazily so the optional Druid dependency is only required
    # when this command actually runs.
    from superset.connectors.druid.models import DruidCluster

    for cluster in session.query(DruidCluster).all():
        try:
            cluster.refresh_datasources(datasource_name=datasource, merge_flag=merge)
        except Exception as ex:  # pylint: disable=broad-except
            print("Error while processing cluster '{}'\n{}".format(cluster, str(ex)))
            logger.exception(ex)
        else:
            # BUGFIX: only stamp the refresh timestamp and report success when
            # the refresh actually succeeded; previously a failing cluster was
            # still marked as freshly refreshed and printed as refreshed.
            cluster.metadata_last_refreshed = datetime.now()
            print("Refreshed metadata from cluster " "[" + cluster.cluster_name + "]")
    session.commit()
def set_dash_metadata(
    dashboard: Dashboard,
    data: Dict[Any, Any],
    old_to_new_slice_ids: Optional[Dict[int, int]] = None,
) -> None:
    """Apply a save-dash payload onto *dashboard*.

    Re-links the dashboard's slices from the layout, stamps chart UUIDs into
    ``position_json``, and rebuilds ``json_metadata`` from *data*.
    ``old_to_new_slice_ids`` remaps filter-scope slice ids when charts were
    copied to new ids (e.g. on import).
    """
    layout = data["positions"]

    # Chart ids referenced by the layout tree (may contain None for nodes
    # without a chartId — preserved on purpose, the DB filter ignores them).
    chart_ids = []
    for node in layout.values():
        if isinstance(node, dict):
            chart_ids.append(node.get("meta", {}).get("chartId"))

    session = db.session()
    linked_slices = session.query(Slice).filter(Slice.id.in_(chart_ids)).all()
    dashboard.slices = linked_slices

    # Stamp each CHART node with its slice's UUID.
    id_to_uuid = {slc.id: str(slc.uuid) for slc in linked_slices}
    for node in layout.values():
        if not isinstance(node, dict):
            continue
        if node["type"] == "CHART" and node["meta"]["chartId"]:
            node["meta"]["uuid"] = id_to_uuid.get(node["meta"]["chartId"])

    # Compact (no whitespace), key-sorted dump of the layout.
    dashboard.position_json = json.dumps(
        layout, indent=None, separators=(",", ":"), sort_keys=True
    )

    md = dashboard.params_dict
    dashboard.css = data.get("css")
    dashboard.dashboard_title = data["dashboard_title"]

    if "timed_refresh_immune_slices" not in md:
        md["timed_refresh_immune_slices"] = []

    new_filter_scopes = {}
    if "filter_scopes" in data:
        # Remap filter ids / immune ids from old slice ids to new ones and
        # drop slices that are no longer on the dashboard.
        if old_to_new_slice_ids:
            id_remap: Dict[int, int] = {
                old: new
                for old, new in old_to_new_slice_ids.items()
                if new in chart_ids
            }
        else:
            id_remap = {sid: sid for sid in chart_ids}
        new_filter_scopes = copy_filter_scopes(
            old_to_new_slc_id_dict=id_remap,
            old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
        )
    if new_filter_scopes:
        md["filter_scopes"] = new_filter_scopes
    else:
        md.pop("filter_scopes", None)

    md["expanded_slices"] = data.get("expanded_slices", {})
    md["refresh_frequency"] = data.get("refresh_frequency", 0)

    # Keep only default filters whose slice is still on the dashboard.
    stored_filters = json.loads(data.get("default_filters", "{}"))
    md["default_filters"] = json.dumps(
        {key: val for key, val in stored_filters.items() if int(key) in chart_ids}
    )

    md["color_scheme"] = data.get("color_scheme")
    if data.get("color_namespace"):
        md["color_namespace"] = data.get("color_namespace")
    if data.get("label_colors"):
        md["label_colors"] = data.get("label_colors")
    dashboard.json_metadata = json.dumps(md)
def set_dash_metadata(  # pylint: disable=too-many-locals
    dashboard: Dashboard,
    data: Dict[Any, Any],
    old_to_new_slice_ids: Optional[Dict[int, int]] = None,
    commit: bool = False,
) -> Dashboard:
    """Apply a dashboard properties/layout payload onto *dashboard*.

    When ``data["positions"]`` is present, re-links the dashboard's slices,
    stamps chart UUIDs into ``position_json`` and filters ``default_filters``
    down to the charts still on the dashboard. Always rebuilds
    ``json_metadata`` from *data*.

    :param dashboard: the Dashboard model instance to mutate
    :param data: request payload (positions, css, metadata fields, ...)
    :param old_to_new_slice_ids: remaps filter-scope slice ids when charts
        were copied to new ids (e.g. on import)
    :param commit: when True, commits the db session before returning
    :returns: the (mutated) *dashboard*
    """
    positions = data.get("positions")
    new_filter_scopes = {}
    md = dashboard.params_dict
    # Everything layout-derived only happens when a layout was sent.
    if positions is not None:
        # find slices in the position data
        slice_ids = [
            value.get("meta", {}).get("chartId")
            for value in positions.values()
            if isinstance(value, dict)
        ]
        session = db.session()
        current_slices = session.query(Slice).filter(
            Slice.id.in_(slice_ids)).all()
        dashboard.slices = current_slices
        # add UUID to positions so the frontend can resolve charts by UUID
        uuid_map = {slice.id: str(slice.uuid) for slice in current_slices}
        for obj in positions.values():
            if (isinstance(obj, dict)
                    and obj["type"] == "CHART"
                    and obj["meta"]["chartId"]):
                chart_id = obj["meta"]["chartId"]
                obj["meta"]["uuid"] = uuid_map.get(chart_id)
        # remove leading and trailing white spaces in the dumped json
        dashboard.position_json = json.dumps(
            positions, indent=None, separators=(",", ":"), sort_keys=True)
        if "filter_scopes" in data:
            # replace filter_id and immune ids from old slice id to new slice id:
            # and remove slice ids that are not in dash anymore
            slc_id_dict: Dict[int, int] = {}
            if old_to_new_slice_ids:
                slc_id_dict = {
                    old: new
                    for old, new in old_to_new_slice_ids.items()
                    if new in slice_ids
                }
            else:
                slc_id_dict = {sid: sid for sid in slice_ids}
            new_filter_scopes = copy_filter_scopes(
                old_to_new_slc_id_dict=slc_id_dict,
                # filter_scopes may arrive as a JSON string or already parsed
                old_filter_scopes=json.loads(data["filter_scopes"] or "{}")
                if isinstance(data["filter_scopes"], str)
                else data["filter_scopes"],
            )
        # keep only default filters whose slice is still on the dashboard
        default_filters_data = json.loads(data.get("default_filters", "{}"))
        applicable_filters = {
            key: v
            for key, v in default_filters_data.items()
            if int(key) in slice_ids
        }
        md["default_filters"] = json.dumps(applicable_filters)
        # positions have its own column, no need to store it in metadata
        md.pop("positions", None)
    # The css and dashboard_title properties are not part of the metadata
    # TODO (geido): remove by refactoring/deprecating save_dash endpoint
    if data.get("css") is not None:
        dashboard.css = data.get("css")
    if data.get("dashboard_title") is not None:
        dashboard.dashboard_title = data.get("dashboard_title")
    # NOTE: filter_scopes is dropped from metadata whenever no new scopes were
    # computed above (including when no positions were sent at all).
    if new_filter_scopes:
        md["filter_scopes"] = new_filter_scopes
    else:
        md.pop("filter_scopes", None)
    md.setdefault("timed_refresh_immune_slices", [])
    if data.get("color_namespace") is None:
        md.pop("color_namespace", None)
    else:
        md["color_namespace"] = data.get("color_namespace")
    md["expanded_slices"] = data.get("expanded_slices", {})
    md["refresh_frequency"] = data.get("refresh_frequency", 0)
    md["color_scheme"] = data.get("color_scheme", "")
    md["label_colors"] = data.get("label_colors", {})
    md["shared_label_colors"] = data.get("shared_label_colors", {})
    dashboard.json_metadata = json.dumps(md)
    if commit:
        db.session.commit()
    return dashboard
def set_dash_metadata(  # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    dashboard: Dashboard,
    data: Dict[Any, Any],
    old_to_new_slice_ids: Optional[Dict[int, int]] = None,
) -> None:
    """Apply a save-dash payload onto *dashboard*.

    Re-links the dashboard's slices from the layout, propagates slice
    renames back to the Slice rows, and rebuilds ``json_metadata`` from
    *data*. ``old_to_new_slice_ids`` remaps filter-scope slice ids when
    charts were copied to new ids.
    """
    layout = data["positions"]

    # Pull chart ids and their display names out of the layout tree.
    # Order matters: the id is recorded even when the node lacks a
    # sliceName (the KeyError then skips only the name mapping).
    chart_ids = []
    names_by_id = {}
    for node in layout.values():
        if not isinstance(node, dict):
            continue
        try:
            chart_id = node["meta"]["chartId"]
            chart_ids.append(chart_id)
            names_by_id[chart_id] = node["meta"]["sliceName"]
        except KeyError:
            pass

    session = db.session()
    dashboard.slices = session.query(Slice).filter(Slice.id.in_(chart_ids)).all()

    # Propagate renames made on the dashboard back to the slices.
    # This assumes the user has permission to update the slice; an empty
    # string is an allowed slice name.
    for slc in dashboard.slices:
        wanted_name = names_by_id.get(slc.id, slc.slice_name)
        if slc.slice_name != wanted_name:
            slc.slice_name = wanted_name
            session.merge(slc)
            session.flush()

    # Compact (no whitespace), key-sorted dump of the layout.
    dashboard.position_json = json.dumps(
        layout, indent=None, separators=(",", ":"), sort_keys=True
    )

    md = dashboard.params_dict
    dashboard.css = data.get("css")
    dashboard.dashboard_title = data["dashboard_title"]

    if "timed_refresh_immune_slices" not in md:
        md["timed_refresh_immune_slices"] = []

    new_filter_scopes = {}
    if "filter_scopes" in data:
        # Remap filter ids / immune ids from old slice ids to new ones and
        # drop slices that are no longer on the dashboard.
        if old_to_new_slice_ids:
            id_remap: Dict[int, int] = {
                old: new
                for old, new in old_to_new_slice_ids.items()
                if new in chart_ids
            }
        else:
            id_remap = {sid: sid for sid in chart_ids}
        new_filter_scopes = copy_filter_scopes(
            old_to_new_slc_id_dict=id_remap,
            old_filter_scopes=json.loads(data["filter_scopes"] or "{}"),
        )
    if new_filter_scopes:
        md["filter_scopes"] = new_filter_scopes
    else:
        md.pop("filter_scopes", None)

    md["expanded_slices"] = data.get("expanded_slices", {})
    md["refresh_frequency"] = data.get("refresh_frequency", 0)

    # Keep only default filters whose slice is still on the dashboard.
    stored_filters = json.loads(data.get("default_filters", "{}"))
    md["default_filters"] = json.dumps(
        {key: val for key, val in stored_filters.items() if int(key) in chart_ids}
    )

    md["color_scheme"] = data.get("color_scheme")
    if data.get("color_namespace"):
        md["color_namespace"] = data.get("color_namespace")
    if data.get("label_colors"):
        md["label_colors"] = data.get("label_colors")
    dashboard.json_metadata = json.dumps(md)