Example #1
def _get_slice_data(schedule):
    slc = schedule.slice

    slice_url = _get_url_path(
        'Superset.explore_json',
        csv='true',
        form_data=json.dumps({'slice_id': slc.id}),
    )

    # URL to include in the email
    url = _get_url_path(
        'Superset.slice',
        slice_id=slc.id,
    )

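    # Flask keeps its login state in a single "session" cookie, so every
    # value from _get_auth_cookies() is stored under that one key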
    cookies = {}
    for cookie in _get_auth_cookies():
        cookies['session'] = cookie

    response = requests.get(slice_url, cookies=cookies)
    response.raise_for_status()

    # TODO: Move to the csv module
    rows = [r.split(b',') for r in response.content.splitlines()]

    if schedule.delivery_type == EmailDeliveryType.inline:
        data = None

        # Parse the csv file and generate HTML
        columns = rows.pop(0)
        with app.app_context():
            body = render_template(
                'superset/reports/slice_data.html',
                columns=columns,
                rows=rows,
                name=slc.slice_name,
                link=url,
            )

    elif schedule.delivery_type == EmailDeliveryType.attachment:
        data = {
            __('%(name)s.csv', name=slc.slice_name): response.content,
        }
        body = __(
            '<b><a href="%(url)s">Explore in Superset</a></b><p></p>',
            name=slc.slice_name,
            url=url,
        )

    return EmailContent(body, data, None)
Example #2
def _get_slice_data(slc: Slice, delivery_type: EmailDeliveryType,
                    session: Session) -> ReportContent:
    slice_url = _get_url_path("Superset.explore_json",
                              csv="true",
                              form_data=json.dumps({"slice_id": slc.id}))

    # URL to include in the email
    slice_url_user_friendly = _get_url_path("Superset.slice",
                                            slice_id=slc.id,
                                            user_friendly=True)

    # Log in on behalf of the "reports" user to obtain auth cookies
    auth_cookies = machine_auth_provider_factory.instance.get_auth_cookies(
        get_reports_user(session))
    # Build something like "session=cool_sess.val;other-cookie=awesome_other_cookie"
    cookie_str = ";".join(
        [f"{key}={val}" for key, val in auth_cookies.items()])

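    # Fetch the chart CSV via urllib, sending the auth cookies as a raw header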
    opener = urllib.request.build_opener()
    opener.addheaders.append(("Cookie", cookie_str))
    response = opener.open(slice_url)
    if response.getcode() != 200:
        raise URLError(response.getcode())

    # TODO: Move to the csv module
    content = response.read()
    rows = [r.split(b",") for r in content.splitlines()]

    if delivery_type == EmailDeliveryType.inline:
        data = None

        # Parse the csv file and generate HTML
        columns = rows.pop(0)
        with app.app_context():
            body = render_template(
                "superset/reports/slice_data.html",
                columns=columns,
                rows=rows,
                name=slc.slice_name,
                link=slice_url_user_friendly,
            )

    elif delivery_type == EmailDeliveryType.attachment:
        data = {__("%(name)s.csv", name=slc.slice_name): content}
        body = __(
            '<b><a href="%(url)s">Explore in Superset</a></b><p></p>',
            name=slc.slice_name,
            url=slice_url_user_friendly,
        )

    # how to: https://api.slack.com/reference/surfaces/formatting
    slack_message = __(
        """
        *%(slice_name)s*\n
        <%(slice_url_user_friendly)s|Explore in Superset>
        """,
        slice_name=slc.slice_name,
        slice_url_user_friendly=slice_url_user_friendly,
    )

    return ReportContent(body, data, None, slack_message, content)
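The "Cookie" header above is built by flattening the auth cookie dict into one string; a minimal standalone check of that construction, reusing the sample values from the inline comment:

auth_cookies = {"session": "cool_sess.val", "other-cookie": "awesome_other_cookie"}
cookie_str = ";".join(f"{key}={val}" for key, val in auth_cookies.items())
assert cookie_str == "session=cool_sess.val;other-cookie=awesome_other_cookie"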
Example #3
def test_time_grain_blacklist(self):
    with app.app_context():
        app.config["TIME_GRAIN_BLACKLIST"] = ["PT1M"]
        time_grain_functions = SqliteEngineSpec.get_time_grain_functions()
        self.assertNotIn("PT1M", time_grain_functions)
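The assertion above relies on the engine spec dropping blacklisted grains when it assembles its grain map. A minimal standalone sketch of that filtering, assuming a plain dict of grain expressions; filter_time_grains is illustrative, not Superset's implementation:

def filter_time_grains(grain_functions, blacklist):
    # Drop blacklisted ISO 8601 durations (e.g. "PT1M") from the grain map
    return {k: v for k, v in grain_functions.items() if k not in blacklist}

grains = {"PT1M": "DATETIME({col}, 'start of minute')", "P1D": "DATE({col})"}
assert "PT1M" not in filter_time_grains(grains, ["PT1M"])
assert "P1D" in filter_time_grains(grains, ["PT1M"])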
Example #4
def bootstrap():
        # Look up the built-in Admin role for the initial admin user
        role_admin = security_manager.find_role(
            security_manager.auth_role_admin)
        admin_user = security_manager.add_user(conf.get('DEFAULT_USERNAME'),
                                               os.environ['ADMIN_FIRST_NAME'],
                                               os.environ['ADMIN_LAST_NAME'],
                                               os.environ['ADMIN_EMAIL'],
                                               role_admin,
                                               os.environ['ADMIN_PASSWORD'])
        # Create default roles and permissions
        utils.get_or_create_main_db()
        security_manager.sync_role_definitions()

        # Set the admin user as the current user
        g.user = admin_user

        # Add dashboards
        dashboards_root = '/home/superset/dashboards'
        import_dashboard(dashboards_root + '/gitbase/overview.json')
        if conf.get('SYNC_MODE'):
            import_dashboard(dashboards_root + '/metadata/welcome.json')
            import_dashboard(dashboards_root + '/metadata/collaboration.json')
        else:
            import_dashboard(dashboards_root + '/gitbase/welcome.json')

        # Set the welcome dashboard as the default
        set_welcome_dashboard(conf.get('DEFAULT_DASHBOARD_ID'), admin_user)


if __name__ == '__main__':
    with app.app_context():
        bootstrap()
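The script reads the admin credentials straight from os.environ, so a missing variable fails with a bare KeyError. A hypothetical pre-flight guard using the same variable names as above:

import os

REQUIRED = ("ADMIN_FIRST_NAME", "ADMIN_LAST_NAME", "ADMIN_EMAIL", "ADMIN_PASSWORD")
missing = [name for name in REQUIRED if name not in os.environ]
if missing:
    raise SystemExit("missing environment variables: " + ", ".join(missing))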
Example #5
def load_explore_json_into_cache(  # pylint: disable=too-many-locals
    job_metadata: Dict[str, Any],
    form_data: Dict[str, Any],
    response_type: Optional[str] = None,
    force: bool = False,
) -> None:
    with app.app_context():  # type: ignore
        cache_key_prefix = "ejr-"  # ejr: explore_json request
        try:
            ensure_user_is_set(job_metadata.get("user_id"))
            datasource_id, datasource_type = get_datasource_info(None, None, form_data)

            # Perform a deep copy here so that below we can cache the original
            # value of the form_data object. This is necessary since the viz
            # objects modify the form_data object. If the modified version were
            # to be cached here, it will lead to a cache miss when clients
            # attempt to retrieve the value of the completed async query.
            original_form_data = copy.deepcopy(form_data)

            viz_obj = get_viz(
                datasource_type=cast(str, datasource_type),
                datasource_id=datasource_id,
                form_data=form_data,
                force=force,
            )
            # run query & cache results
            payload = viz_obj.get_payload()
            if viz_obj.has_error(payload):
                raise SupersetVizException(errors=payload["errors"])

            # Cache the original form_data value for async retrieval
            cache_value = {
                "form_data": original_form_data,
                "response_type": response_type,
            }
            cache_key = generate_cache_key(cache_value, cache_key_prefix)
            set_and_log_cache(cache_manager.cache, cache_key, cache_value)
            result_url = f"/superset/explore_json/data/{cache_key}"
            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_DONE, result_url=result_url,
            )
        except SoftTimeLimitExceeded as ex:
            logger.warning(
                "A timeout occurred while loading explore json, error: %s", ex
            )
            raise ex
        except Exception as exc:
            if isinstance(exc, SupersetVizException):
                errors = exc.errors  # pylint: disable=no-member
            else:
                error = (
                    exc.message if hasattr(exc, "message") else str(exc)  # type: ignore # pylint: disable=no-member
                )
                errors = [error]

            async_query_manager.update_job(
                job_metadata, async_query_manager.STATUS_ERROR, errors=errors
            )
            raise exc

        return None
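The deep copy in the task above matters because viz objects mutate form_data in place, and caching the mutated value would cause the cache miss the inline comment warns about. A standalone illustration of that aliasing hazard:

import copy

form_data = {"slice_id": 42, "extra_filters": []}
original_form_data = copy.deepcopy(form_data)

# Simulate a viz object mutating form_data in place...
form_data["extra_filters"].append({"col": "ds", "op": "==", "val": "2021"})

# ...the deep-copied snapshot is unaffected, so the cached value stays stable
assert original_form_data["extra_filters"] == []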