Code Example #1
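This test checks that syncing a Druid cluster also creates the matching access permission: it mocks the PyDruid client, recreates a test_cluster DruidCluster record (removing any leftover cluster and its datasources first), refreshes the cluster's datasources, and then asserts that a datasource_access permission/view-menu pair exists for the new datasource.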
    def test_sync_druid_perm(self, PyDruid):
        self.login(username="******")
        instance = PyDruid.return_value
        instance.time_boundary.return_value = [{"result": {"maxTime": "2016-01-01"}}]
        instance.segment_metadata.return_value = SEGMENT_METADATA

        cluster = (
            db.session.query(DruidCluster)
            .filter_by(cluster_name="test_cluster")
            .first()
        )
        if cluster:
            for datasource in (
                db.session.query(DruidDatasource).filter_by(cluster_id=cluster.id).all()
            ):
                db.session.delete(datasource)

            db.session.delete(cluster)
        db.session.commit()

        cluster = DruidCluster(
            cluster_name="test_cluster",
            broker_host="localhost",
            broker_port=7980,
            metadata_last_refreshed=datetime.now(),
        )

        db.session.add(cluster)
        cluster.get_datasources = PickableMock(return_value=["test_datasource"])

        cluster.refresh_datasources()
        cluster.datasources[0].merge_flag = True
        metadata = cluster.datasources[0].latest_metadata()
        self.assertEqual(len(metadata), 4)
        db.session.commit()

        view_menu_name = cluster.datasources[0].get_perm()
        view_menu = security_manager.find_view_menu(view_menu_name)
        permission = security_manager.find_permission("datasource_access")

        pv = (
            security_manager.get_session.query(security_manager.permissionview_model)
            .filter_by(permission=permission, view_menu=view_menu)
            .first()
        )
        assert pv is not None
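The PyDruid argument in these tests is a mock injected by a unittest.mock.patch decorator applied to the test method; the decorator itself is not visible in these snippets, and the exact patch target depends on the Superset module layout. Below is a minimal, self-contained sketch of that injection mechanism, using json.dumps as a purely hypothetical patch target:

    import json
    from unittest import TestCase, main
    from unittest.mock import patch

    class PatchInjectionExample(TestCase):
        # The patched attribute is replaced with a Mock for the duration of
        # the test and handed in as an extra argument, just as PyDruid is in
        # the examples on this page.
        @patch("json.dumps")  # hypothetical target, for illustration only
        def test_patch_injects_mock(self, mocked_dumps):
            mocked_dumps.return_value = "{}"
            # json.dumps now resolves to the injected mock.
            self.assertEqual(json.dumps({"a": 1}), "{}")

    if __name__ == "__main__":
        main()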
Code Example #2
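A close variant of the previous example: here the cluster is created with an explicit coordinator host and port, get_druid_version is stubbed as well, and the permission lookup goes through the shorter sm handle to the security manager instead of security_manager.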
    def test_sync_druid_perm(self, PyDruid):
        self.login(username='******')
        instance = PyDruid.return_value
        instance.time_boundary.return_value = [
            {'result': {'maxTime': '2016-01-01'}}]
        instance.segment_metadata.return_value = SEGMENT_METADATA

        cluster = (
            db.session
            .query(DruidCluster)
            .filter_by(cluster_name='test_cluster')
            .first()
        )
        if cluster:
            db.session.delete(cluster)
        db.session.commit()

        cluster = DruidCluster(
            cluster_name='test_cluster',
            coordinator_host='localhost',
            coordinator_port=7979,
            broker_host='localhost',
            broker_port=7980,
            metadata_last_refreshed=datetime.now())

        db.session.add(cluster)
        cluster.get_datasources = PickableMock(
            return_value=['test_datasource'],
        )
        cluster.get_druid_version = PickableMock(return_value='0.9.1')

        cluster.refresh_datasources()
        cluster.datasources[0].merge_flag = True
        metadata = cluster.datasources[0].latest_metadata()
        self.assertEqual(len(metadata), 4)
        db.session.commit()

        view_menu_name = cluster.datasources[0].get_perm()
        view_menu = sm.find_view_menu(view_menu_name)
        permission = sm.find_permission('datasource_access')

        pv = sm.get_session.query(sm.permissionview_model).filter_by(
            permission=permission, view_menu=view_menu).first()
        assert pv is not None
Code Example #3
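This test exercises the Druid explore views end to end: it sets up the same mocked cluster, points the mocked client's export_pandas at a canned DataFrame built from GB_RESULT_SET, and then issues explore and explore_json requests with one and two groupby columns, checking the records that come back.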
    def test_client(self, PyDruid):
        self.login(username='******')
        instance = PyDruid.return_value
        instance.time_boundary.return_value = [
            {'result': {'maxTime': '2016-01-01'}}]
        instance.segment_metadata.return_value = SEGMENT_METADATA

        cluster = (
            db.session
            .query(DruidCluster)
            .filter_by(cluster_name='test_cluster')
            .first()
        )
        if cluster:
            db.session.delete(cluster)
        db.session.commit()

        cluster = DruidCluster(
            cluster_name='test_cluster',
            coordinator_host='localhost',
            coordinator_port=7979,
            broker_host='localhost',
            broker_port=7980,
            metadata_last_refreshed=datetime.now())

        db.session.add(cluster)
        cluster.get_datasources = PickableMock(return_value=['test_datasource'])
        cluster.get_druid_version = PickableMock(return_value='0.9.1')
        cluster.refresh_datasources()
        cluster.refresh_datasources(merge_flag=True)
        datasource_id = cluster.datasources[0].id
        db.session.commit()

        # Reshape the canned groupby results into a DataFrame and have the
        # mocked client return it from export_pandas().
        nres = [
            list(v['event'].items()) + [('timestamp', v['timestamp'])]
            for v in GB_RESULT_SET]
        nres = [dict(v) for v in nres]
        import pandas as pd
        df = pd.DataFrame(nres)
        instance.export_pandas.return_value = df
        instance.query_dict = {}
        instance.query_builder.last_query.query_dict = {}

        resp = self.get_resp('/superset/explore/druid/{}/'.format(
            datasource_id))
        self.assertIn('test_datasource', resp)
        form_data = {
            'viz_type': 'table',
            'granularity': 'one+day',
            'druid_time_origin': '',
            'since': '7+days+ago',
            'until': 'now',
            'row_limit': 5000,
            'include_search': 'false',
            'metrics': ['count'],
            'groupby': ['dim1'],
            'force': 'true',
        }
        # One groupby column
        url = '/superset/explore_json/druid/{}/'.format(datasource_id)
        resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])

        form_data = {
            'viz_type': 'table',
            'granularity': 'one+day',
            'druid_time_origin': '',
            'since': '7+days+ago',
            'until': 'now',
            'row_limit': 5000,
            'include_search': 'false',
            'metrics': ['count'],
            'groupby': ['dim1', 'dim2d'],
            'force': 'true',
        }
        # Two groupby columns
        url = '/superset/explore_json/druid/{}/'.format(datasource_id)
        resp = self.get_json_resp(url, {'form_data': json.dumps(form_data)})
        self.assertEqual('Canada', resp['data']['records'][0]['dim1'])
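The records asserted on above come from the DataFrame that the mocked export_pandas call returns. As a standalone illustration of the reshaping done before pd.DataFrame(nres), assuming a GB_RESULT_SET-style fixture of rows with a timestamp and an event dict (the real fixture is not shown here):

    import pandas as pd

    # Hypothetical stand-in for GB_RESULT_SET; only the shape matters.
    result_set = [
        {'timestamp': '2016-01-01T00:00:00', 'event': {'dim1': 'Canada', 'count': 12}},
        {'timestamp': '2016-01-01T00:00:00', 'event': {'dim1': 'USA', 'count': 7}},
    ]

    # Flatten each event dict and append its timestamp, as the test does.
    rows = [
        dict(list(v['event'].items()) + [('timestamp', v['timestamp'])])
        for v in result_set
    ]
    df = pd.DataFrame(rows)
    print(df)  # columns: dim1, count, timestamp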