def import_from_dict(session, data, sync=None):
    """Imports databases and druid clusters from dictionary.

    :param session: SQLAlchemy session used by the per-model importers
    :param data: mapping expected to contain DATABASES_KEY and/or
        DRUID_CLUSTERS_KEY lists; anything else is logged and ignored
    :param sync: optional list of field groups to synchronize, forwarded
        to each model's ``import_from_dict``
    """
    # A mutable default (``sync=[]``) is shared across calls and could be
    # mutated downstream; use None as the sentinel instead.
    if sync is None:
        sync = []
    if isinstance(data, dict):
        logging.info('Importing %d %s',
                     len(data.get(DATABASES_KEY, [])),
                     DATABASES_KEY)
        for database in data.get(DATABASES_KEY, []):
            Database.import_from_dict(session, database, sync=sync)
        logging.info('Importing %d %s',
                     len(data.get(DRUID_CLUSTERS_KEY, [])),
                     DRUID_CLUSTERS_KEY)
        for datasource in data.get(DRUID_CLUSTERS_KEY, []):
            DruidCluster.import_from_dict(session, datasource, sync=sync)
        # Single commit after both model families are imported.
        session.commit()
    else:
        logging.info('Supplied object is not a dictionary.')
def test_set_perm_druid_cluster(self):
    """The ``perm`` field of a DruidCluster must track its cluster_name.

    Verifies both the initial perm on insert and that renaming the
    cluster rewrites the perm and its associated view menu entry.
    """
    session = db.session
    cluster = DruidCluster(cluster_name="tmp_druid_cluster")
    session.add(cluster)
    # Autoflush on the query below assigns the primary key and fires the
    # listeners that populate ``perm``.
    stored_cluster = (
        session.query(DruidCluster)
        .filter_by(cluster_name="tmp_druid_cluster")
        .one()
    )
    # ``assertEquals`` is a deprecated alias (removed in Python 3.12);
    # use ``assertEqual``.
    self.assertEqual(
        stored_cluster.perm, f"[tmp_druid_cluster].(id:{stored_cluster.id})"
    )
    self.assertIsNotNone(
        security_manager.find_permission_view_menu(
            "database_access", stored_cluster.perm
        )
    )
    # Renaming must regenerate the perm string on commit.
    stored_cluster.cluster_name = "tmp_druid_cluster2"
    session.commit()
    self.assertEqual(
        stored_cluster.perm, f"[tmp_druid_cluster2].(id:{stored_cluster.id})"
    )
    self.assertIsNotNone(
        security_manager.find_permission_view_menu(
            "database_access", stored_cluster.perm
        )
    )
    # Clean up so other tests see a pristine database.
    session.delete(stored_cluster)
    session.commit()
def decode_dashboards(o: Dict[str, Any]) -> Any:
    """
    Function to be passed into json.loads obj_hook parameter
    Recreates the dashboard object from a json representation.
    """
    from superset.connectors.druid.models import (
        DruidCluster,
        DruidColumn,
        DruidDatasource,
        DruidMetric,
    )

    # Marker key -> model constructor; insertion order preserves the
    # original first-match precedence of the if-chain.
    constructor_by_marker = {
        "__Dashboard__": Dashboard,
        "__Slice__": Slice,
        "__TableColumn__": TableColumn,
        "__SqlaTable__": SqlaTable,
        "__SqlMetric__": SqlMetric,
        "__DruidCluster__": DruidCluster,
        "__DruidColumn__": DruidColumn,
        "__DruidDatasource__": DruidDatasource,
        "__DruidMetric__": DruidMetric,
    }
    for marker, constructor in constructor_by_marker.items():
        if marker in o:
            return constructor(**o[marker])
    if "__datetime__" in o:
        return datetime.strptime(o["__datetime__"], "%Y-%m-%dT%H:%M:%S")
    # Not a tagged object: hand the plain dict back to json.loads.
    return o
def __init__(self, *args, **kwargs):
    """Test-case constructor: loads example data once per process, then
    seeds a Druid test cluster with two datasources.

    Uses the ``examples_loaded`` environment variable as a cross-instance
    guard so the (slow) example load happens only for the first test case.
    """
    if (self.requires_examples and
            not os.environ.get('examples_loaded')):
        logging.info('Loading examples')
        cli.load_examples_run(load_test_data=True)
        logging.info('Done loading examples')
        security_manager.sync_role_definitions()
        # Mark examples as loaded for every later test-case instance.
        os.environ['examples_loaded'] = '1'
    else:
        security_manager.sync_role_definitions()
    super(SupersetTestCase, self).__init__(*args, **kwargs)
    self.client = app.test_client()
    # Show full diffs on assertion failures.
    self.maxDiff = None
    cli.load_test_users_run()
    # create druid cluster and druid datasources
    session = db.session
    cluster = (session.query(DruidCluster).filter_by(
        cluster_name='druid_test').first())
    # Only seed when absent so repeated instantiation stays idempotent.
    if not cluster:
        cluster = DruidCluster(cluster_name='druid_test')
        session.add(cluster)
        session.commit()
        # NOTE(review): datasources appear linked by name, not FK —
        # presumably DruidDatasource resolves cluster_name; verify.
        druid_datasource1 = DruidDatasource(
            datasource_name='druid_ds_1',
            cluster_name='druid_test',
        )
        session.add(druid_datasource1)
        druid_datasource2 = DruidDatasource(
            datasource_name='druid_ds_2',
            cluster_name='druid_test',
        )
        session.add(druid_datasource2)
        session.commit()
def get_test_cluster_obj(self):
    """Build an unsaved DruidCluster pointing at the local test broker."""
    cluster_kwargs = dict(
        cluster_name='test_cluster',
        broker_host='localhost',
        broker_port=7980,
        broker_endpoint='druid/v2',
        metadata_last_refreshed=datetime.now(),
    )
    return DruidCluster(**cluster_kwargs)
def get_test_cluster_obj(self):
    """Return a transient DruidCluster configured for the local test broker."""
    broker_settings = {
        "broker_host": "localhost",
        "broker_port": 7980,
        "broker_endpoint": "druid/v2",
    }
    return DruidCluster(
        cluster_name="test_cluster",
        metadata_last_refreshed=datetime.now(),
        **broker_settings,
    )
def test_sync_druid_perm(self, PyDruid):
    """Refreshing a fresh cluster must register a datasource_access
    permission for the discovered datasource.

    ``PyDruid`` is the patched pydruid client class supplied by the
    test decorator.
    """
    self.login(username="******")
    druid_client = PyDruid.return_value
    # Canned broker responses so no real Druid is contacted.
    druid_client.time_boundary.return_value = [{"result": {"maxTime": "2016-01-01"}}]
    druid_client.segment_metadata.return_value = SEGMENT_METADATA

    # Remove any cluster (and its datasources) left over from prior runs.
    existing = (
        db.session.query(DruidCluster)
        .filter_by(cluster_name="test_cluster")
        .first()
    )
    if existing:
        for stale_datasource in (
            db.session.query(DruidDatasource).filter_by(cluster_id=existing.id).all()
        ):
            db.session.delete(stale_datasource)
        db.session.delete(existing)
    db.session.commit()

    cluster = DruidCluster(
        cluster_name="test_cluster",
        broker_host="localhost",
        broker_port=7980,
        metadata_last_refreshed=datetime.now(),
    )
    db.session.add(cluster)
    # PickableMock survives the serialization done during refresh.
    cluster.get_datasources = PickableMock(return_value=["test_datasource"])
    cluster.refresh_datasources()
    cluster.datasources[0].merge_flag = True
    fetched_metadata = cluster.datasources[0].latest_metadata()
    self.assertEqual(len(fetched_metadata), 4)
    db.session.commit()

    # The refresh must have minted a datasource_access permission-view.
    perm_name = cluster.datasources[0].get_perm()
    view_menu = security_manager.find_view_menu(perm_name)
    permission = security_manager.find_permission("datasource_access")
    pv = (
        security_manager.get_session.query(security_manager.permissionview_model)
        .filter_by(permission=permission, view_menu=view_menu)
        .first()
    )
    assert pv is not None
def import_from_dict(data: Dict[str, Any], sync: Optional[List[str]] = None) -> None:
    """Imports databases and druid clusters from dictionary"""
    # Normalize the optional sync list (None and [] are equivalent here).
    sync = sync or []
    if not isinstance(data, dict):
        logger.info("Supplied object is not a dictionary.")
        return
    databases = data.get(DATABASES_KEY, [])
    logger.info("Importing %d %s", len(databases), DATABASES_KEY)
    for database in databases:
        Database.import_from_dict(database, sync=sync)
    clusters = data.get(DRUID_CLUSTERS_KEY, [])
    logger.info("Importing %d %s", len(clusters), DRUID_CLUSTERS_KEY)
    for cluster in clusters:
        DruidCluster.import_from_dict(cluster, sync=sync)
    # One commit once both model families have been imported.
    db.session.commit()
def test_sync_druid_perm(self, PyDruid):
    """Refreshing datasources on a new cluster should create a
    datasource_access permission-view.

    ``PyDruid`` is the patched pydruid client class injected by the
    test decorator.
    """
    self.login(username='******')
    instance = PyDruid.return_value
    # Canned broker responses so refresh needs no live Druid.
    instance.time_boundary.return_value = [
        {'result': {'maxTime': '2016-01-01'}}]
    instance.segment_metadata.return_value = SEGMENT_METADATA
    # Drop any cluster left behind by a previous run.
    cluster = (
        db.session
        .query(DruidCluster)
        .filter_by(cluster_name='test_cluster')
        .first()
    )
    if cluster:
        db.session.delete(cluster)
    db.session.commit()
    cluster = DruidCluster(
        cluster_name='test_cluster',
        coordinator_host='localhost',
        coordinator_port=7979,
        broker_host='localhost',
        broker_port=7980,
        metadata_last_refreshed=datetime.now())
    db.session.add(cluster)
    # PickableMock: plain Mock cannot be pickled during the refresh.
    cluster.get_datasources = PickableMock(
        return_value=['test_datasource'],
    )
    cluster.refresh_datasources()
    cluster.datasources[0].merge_flag = True
    # SEGMENT_METADATA presumably yields 4 metadata entries — confirm
    # against the fixture if this assertion drifts.
    metadata = cluster.datasources[0].latest_metadata()
    self.assertEqual(len(metadata), 4)
    db.session.commit()
    # The refresh must have minted the datasource_access permission-view.
    view_menu_name = cluster.datasources[0].get_perm()
    view_menu = security_manager.find_view_menu(view_menu_name)
    permission = security_manager.find_permission('datasource_access')
    pv = security_manager.get_session.query(
        security_manager.permissionview_model).filter_by(
        permission=permission, view_menu=view_menu).first()
    assert pv is not None
def test_sync_druid_perm(self, PyDruid):
    """Refreshing datasources on a new cluster should create a
    datasource_access permission-view (``sm`` = security manager).

    ``PyDruid`` is the patched pydruid client class injected by the
    test decorator.
    """
    self.login(username='******')
    instance = PyDruid.return_value
    # Canned broker responses so refresh needs no live Druid.
    instance.time_boundary.return_value = [
        {'result': {'maxTime': '2016-01-01'}}]
    instance.segment_metadata.return_value = SEGMENT_METADATA
    # Drop any cluster left behind by a previous run.
    cluster = (
        db.session
        .query(DruidCluster)
        .filter_by(cluster_name='test_cluster')
        .first()
    )
    if cluster:
        db.session.delete(cluster)
    db.session.commit()
    cluster = DruidCluster(
        cluster_name='test_cluster',
        coordinator_host='localhost',
        coordinator_port=7979,
        broker_host='localhost',
        broker_port=7980,
        metadata_last_refreshed=datetime.now())
    db.session.add(cluster)
    # PickableMock: plain Mock cannot be pickled during the refresh.
    cluster.get_datasources = PickableMock(
        return_value=['test_datasource'],
    )
    cluster.get_druid_version = PickableMock(return_value='0.9.1')
    cluster.refresh_datasources()
    cluster.datasources[0].merge_flag = True
    # SEGMENT_METADATA presumably yields 4 metadata entries — confirm
    # against the fixture if this assertion drifts.
    metadata = cluster.datasources[0].latest_metadata()
    self.assertEqual(len(metadata), 4)
    db.session.commit()
    # The refresh must have minted the datasource_access permission-view.
    view_menu_name = cluster.datasources[0].get_perm()
    view_menu = sm.find_view_menu(view_menu_name)
    permission = sm.find_permission('datasource_access')
    pv = sm.get_session.query(sm.permissionview_model).filter_by(
        permission=permission, view_menu=view_menu).first()
    assert pv is not None
def export_schema_to_dict(back_references):
    """Exports the supported import/export schema to a dictionary"""
    database_schemas = [
        Database.export_schema(recursive=True,
                               include_parent_ref=back_references),
    ]
    cluster_schemas = [
        DruidCluster.export_schema(recursive=True,
                                   include_parent_ref=back_references),
    ]
    data = {}
    if database_schemas:
        data[DATABASES_KEY] = database_schemas
    if cluster_schemas:
        data[DRUID_CLUSTERS_KEY] = cluster_schemas
    return data
def export_schema_to_dict(back_references):
    """Exports the supported import/export schema to a dictionary"""
    # Build both sections, then keep only the non-empty ones.
    sections = {
        DATABASES_KEY: [
            Database.export_schema(
                recursive=True, include_parent_ref=back_references
            )
        ],
        DRUID_CLUSTERS_KEY: [
            DruidCluster.export_schema(
                recursive=True, include_parent_ref=back_references
            )
        ],
    }
    return {key: value for key, value in sections.items() if value}
def test_hybrid_perm_druid_cluster(self):
    """The hybrid ``perm`` attribute must be queryable server-side and
    agree with ``get_perm()``."""
    session = db.session
    tmp_cluster = DruidCluster(cluster_name="tmp_druid_cluster3")
    session.add(tmp_cluster)
    # Autoflush on this query assigns the primary key.
    cluster_id = (
        session.query(DruidCluster.id)
        .filter_by(cluster_name="tmp_druid_cluster3")
        .scalar()
    )
    # Filtering by perm exercises the hybrid expression in SQL.
    fetched = (
        session.query(DruidCluster)
        .filter_by(perm=f"[tmp_druid_cluster3].(id:{cluster_id})")
        .one()
    )
    self.assertEqual(fetched.get_perm(), fetched.perm)
    self.assertEqual(fetched.id, cluster_id)
    self.assertEqual(fetched.cluster_name, "tmp_druid_cluster3")
    # Clean up the temporary row.
    session.delete(tmp_cluster)
    session.commit()
def create_druid_test_objects(cls):
    """Seed the test DB with a 'druid_test' cluster and two datasources;
    no-op when the cluster already exists."""
    session = db.session
    cluster = session.query(DruidCluster).filter_by(
        cluster_name="druid_test").first()
    if cluster:
        # Already seeded by an earlier run — keep the fixture idempotent.
        return
    cluster = DruidCluster(cluster_name="druid_test")
    session.add(cluster)
    session.commit()
    for datasource_name in ("druid_ds_1", "druid_ds_2"):
        session.add(DruidDatasource(datasource_name=datasource_name,
                                    cluster_name="druid_test"))
    session.commit()
def test_sync_druid_perm(self, PyDruid):
    """Refreshing datasources on a new cluster should create a
    datasource_access permission-view (``sm`` = security manager).

    ``PyDruid`` is the patched pydruid client class injected by the
    test decorator.
    """
    self.login(username='******')
    instance = PyDruid.return_value
    # Canned broker responses so refresh needs no live Druid.
    instance.time_boundary.return_value = [
        {'result': {'maxTime': '2016-01-01'}}]
    instance.segment_metadata.return_value = SEGMENT_METADATA
    # Drop any cluster left behind by a previous run.
    cluster = (
        db.session
        .query(DruidCluster)
        .filter_by(cluster_name='test_cluster')
        .first()
    )
    if cluster:
        db.session.delete(cluster)
    db.session.commit()
    cluster = DruidCluster(
        cluster_name='test_cluster',
        coordinator_host='localhost',
        coordinator_port=7979,
        broker_host='localhost',
        broker_port=7980,
        metadata_last_refreshed=datetime.now())
    db.session.add(cluster)
    cluster.get_datasources = Mock(return_value=['test_datasource'])
    cluster.get_druid_version = Mock(return_value='0.9.1')
    cluster.refresh_datasources()
    # NOTE(review): datasource_id is captured but never used below.
    datasource_id = cluster.datasources[0].id
    db.session.commit()
    # The refresh must have minted the datasource_access permission-view.
    view_menu_name = cluster.datasources[0].get_perm()
    view_menu = sm.find_view_menu(view_menu_name)
    permission = sm.find_permission("datasource_access")
    pv = sm.get_session.query(sm.permissionview_model).filter_by(
        permission=permission, view_menu=view_menu).first()
    assert pv is not None
def test_client(self, PyDruid):
    """End-to-end Druid explore test: seed a mocked cluster, then hit the
    explore and explore_json endpoints and check the grouped results.

    ``PyDruid`` is the patched pydruid client class injected by the
    test decorator.
    """
    self.login(username='******')
    instance = PyDruid.return_value
    # Canned broker responses so no live Druid is needed.
    instance.time_boundary.return_value = [
        {'result': {'maxTime': '2016-01-01'}}]
    instance.segment_metadata.return_value = SEGMENT_METADATA
    # Drop any cluster left behind by a previous run.
    cluster = (
        db.session
        .query(DruidCluster)
        .filter_by(cluster_name='test_cluster')
        .first()
    )
    if cluster:
        db.session.delete(cluster)
    db.session.commit()
    cluster = DruidCluster(
        cluster_name='test_cluster',
        coordinator_host='localhost',
        coordinator_port=7979,
        broker_host='localhost',
        broker_port=7980,
        metadata_last_refreshed=datetime.now())
    db.session.add(cluster)
    # PickableMock: plain Mock cannot be pickled during the refresh.
    cluster.get_datasources = PickableMock(return_value=['test_datasource'])
    cluster.get_druid_version = PickableMock(return_value='0.9.1')
    cluster.refresh_datasources()
    cluster.refresh_datasources(merge_flag=True)
    datasource_id = cluster.datasources[0].id
    db.session.commit()
    # Reshape the fixture result set into rows the mocked client returns.
    nres = [
        list(v['event'].items()) + [('timestamp', v['timestamp'])]
        for v in GB_RESULT_SET]
    nres = [dict(v) for v in nres]
    import pandas as pd
    df = pd.DataFrame(nres)
    instance.export_pandas.return_value = df
    instance.query_dict = {}
    instance.query_builder.last_query.query_dict = {}
    # Explore page should render the datasource name.
    resp = self.get_resp('/superset/explore/druid/{}/'.format(
        datasource_id))
    self.assertIn('test_datasource', resp)
    form_data = {
        'viz_type': 'table',
        'granularity': 'one+day',
        'druid_time_origin': '',
        'since': '7+days+ago',
        'until': 'now',
        'row_limit': 5000,
        'include_search': 'false',
        'metrics': ['count'],
        'groupby': ['dim1'],
        'force': 'true',
    }
    # One groupby
    url = ('/superset/explore_json/druid/{}/'.format(datasource_id))
    resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
    # First record presumably comes from GB_RESULT_SET — confirm fixture.
    self.assertEqual('Canada', resp['data']['records'][0]['dim1'])
    form_data = {
        'viz_type': 'table',
        'granularity': 'one+day',
        'druid_time_origin': '',
        'since': '7+days+ago',
        'until': 'now',
        'row_limit': 5000,
        'include_search': 'false',
        'metrics': ['count'],
        'groupby': ['dim1', 'dim2d'],
        'force': 'true',
    }
    # two groupby
    url = ('/superset/explore_json/druid/{}/'.format(datasource_id))
    resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
    self.assertEqual('Canada', resp['data']['records'][0]['dim1'])
def __init__(self, *args, **kwargs):
    """Test-case constructor: loads examples once per process, builds the
    gamma_sqllab role, creates the standard test users, and seeds a Druid
    test cluster with two datasources.

    Uses the ``examples_loaded`` environment variable as a cross-instance
    guard so the (slow) example load happens only once.
    """
    if (self.requires_examples and
            not os.environ.get('examples_loaded')):
        logging.info('Loading examples')
        cli.load_examples_run(load_test_data=True)
        logging.info('Done loading examples')
        security_manager.sync_role_definitions()
        # Mark examples as loaded for every later test-case instance.
        os.environ['examples_loaded'] = '1'
    else:
        security_manager.sync_role_definitions()
    super(SupersetTestCase, self).__init__(*args, **kwargs)
    self.client = app.test_client()
    # Show full diffs on assertion failures.
    self.maxDiff = None
    # gamma_sqllab = Gamma perms + main-db access + sql_lab perms.
    gamma_sqllab_role = security_manager.add_role('gamma_sqllab')
    for perm in security_manager.find_role('Gamma').permissions:
        security_manager.add_permission_role(gamma_sqllab_role, perm)
    utils.get_or_create_main_db()
    db_perm = self.get_main_database(security_manager.get_session).perm
    security_manager.merge_perm('database_access', db_perm)
    db_pvm = security_manager.find_permission_view_menu(
        view_menu_name=db_perm, permission_name='database_access')
    gamma_sqllab_role.permissions.append(db_pvm)
    for perm in security_manager.find_role('sql_lab').permissions:
        security_manager.add_permission_role(gamma_sqllab_role, perm)
    # Create each well-known test user only if missing (idempotent).
    admin = security_manager.find_user('admin')
    if not admin:
        security_manager.add_user(
            'admin', 'admin', ' user', '*****@*****.**',
            security_manager.find_role('Admin'),
            password='******')
    gamma = security_manager.find_user('gamma')
    if not gamma:
        security_manager.add_user(
            'gamma', 'gamma', 'user', '*****@*****.**',
            security_manager.find_role('Gamma'),
            password='******')
    gamma2 = security_manager.find_user('gamma2')
    if not gamma2:
        security_manager.add_user(
            'gamma2', 'gamma2', 'user', '*****@*****.**',
            security_manager.find_role('Gamma'),
            password='******')
    gamma_sqllab_user = security_manager.find_user('gamma_sqllab')
    if not gamma_sqllab_user:
        security_manager.add_user(
            'gamma_sqllab', 'gamma_sqllab', 'user', '*****@*****.**',
            gamma_sqllab_role, password='******')
    alpha = security_manager.find_user('alpha')
    if not alpha:
        security_manager.add_user(
            'alpha', 'alpha', 'user', '*****@*****.**',
            security_manager.find_role('Alpha'),
            password='******')
    security_manager.get_session.commit()
    # create druid cluster and druid datasources
    session = db.session
    cluster = (session.query(DruidCluster).filter_by(
        cluster_name='druid_test').first())
    # Only seed when absent so repeated instantiation stays idempotent.
    if not cluster:
        cluster = DruidCluster(cluster_name='druid_test')
        session.add(cluster)
        session.commit()
        druid_datasource1 = DruidDatasource(
            datasource_name='druid_ds_1',
            cluster_name='druid_test',
        )
        session.add(druid_datasource1)
        druid_datasource2 = DruidDatasource(
            datasource_name='druid_ds_2',
            cluster_name='druid_test',
        )
        session.add(druid_datasource2)
        session.commit()