Example No. 1
    async def post(self, request):
        database_route = tilde_decode(request.url_vars["database"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database_name = db.name
        table_name = tilde_decode(request.url_vars["table"])
        # Handle POST to a canned query
        canned_query = await self.ds.get_canned_query(
            database_name, table_name, request.actor
        )
        assert canned_query, "You may only POST to a canned query"
        return await QueryView(self.ds).data(
            request,
            canned_query["sql"],
            metadata=canned_query,
            editable=False,
            canned_query=table_name,
            named_parameters=canned_query.get("params"),
            write=bool(canned_query.get("write")),
        )
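
The canned query that get_canned_query() resolves here is normally declared in the Datasette instance's metadata. A minimal sketch of a writable canned query definition that this POST handler could serve, using illustrative names (a database called mydb and a query called add_name) rather than anything taken from the example above:

# Illustrative metadata sketch: POSTing to /mydb/add_name would reach the
# handler above, which hands the SQL off to QueryView with write=True.
metadata = {
    "databases": {
        "mydb": {
            "queries": {
                "add_name": {
                    "sql": "insert into names (name) values (:name)",
                    "write": True,
                }
            }
        }
    }
}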
Example No. 2
    async def get(self, request):
        database = tilde_decode(request.url_vars["database"])
        await self.ds.ensure_permissions(
            request.actor,
            [
                ("view-database-download", database),
                ("view-database", database),
                "view-instance",
            ],
        )
        try:
            db = self.ds.get_database(route=database)
        except KeyError:
            raise DatasetteError("Invalid database", status=404)
        if db.is_memory:
            raise DatasetteError("Cannot download in-memory databases", status=404)
        if not self.ds.setting("allow_download") or db.is_mutable:
            raise Forbidden("Database download is forbidden")
        if not db.path:
            raise DatasetteError("Cannot download database", status=404)
        filepath = db.path
        headers = {}
        if self.ds.cors:
            add_cors_headers(headers)
        if db.hash:
            etag = '"{}"'.format(db.hash)
            headers["Etag"] = etag
            # Has user seen this already?
            if_none_match = request.headers.get("if-none-match")
            if if_none_match and if_none_match == etag:
                return Response("", status=304)
        headers["Transfer-Encoding"] = "chunked"
        return AsgiFileDownload(
            filepath,
            filename=os.path.basename(filepath),
            content_type="application/octet-stream",
            headers=headers,
        )
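
The ETag block above implements a standard HTTP conditional GET: the database's content hash becomes the ETag, and a matching If-None-Match request header short-circuits the download with a 304 Not Modified. A minimal standalone sketch of that check (a plain helper for illustration, not part of Datasette's API):

def not_modified(if_none_match, db_hash):
    """Return True when the client's If-None-Match matches our ETag."""
    if not db_hash or not if_none_match:
        return False
    return if_none_match == '"{}"'.format(db_hash)

# A 304 tells the client its cached copy of the database file is still valid.
assert not_modified('"abc123"', "abc123") is True
assert not_modified(None, "abc123") is False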
Example No. 3
    async def get(self, request):
        database_route = tilde_decode(request.url_vars["database"])

        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database = db.name

        _format = request.url_vars.get("format")
        data_kwargs = {}

        if _format == "csv":
            return await self.as_csv(request, database_route)

        if _format is None:
            # HTML views default to expanding all foreign key labels
            data_kwargs["default_labels"] = True

        extra_template_data = {}
        start = time.perf_counter()
        status_code = None
        templates = []
        try:
            response_or_template_contexts = await self.data(
                request, **data_kwargs)
            if isinstance(response_or_template_contexts, Response):
                return response_or_template_contexts
            # If it has four items, it includes an HTTP status code
            if len(response_or_template_contexts) == 4:
                (
                    data,
                    extra_template_data,
                    templates,
                    status_code,
                ) = response_or_template_contexts
            else:
                data, extra_template_data, templates = response_or_template_contexts
        except QueryInterrupted:
            raise DatasetteError(
                """
                SQL query took too long. The time limit is controlled by the
                <a href="https://docs.datasette.io/en/stable/settings.html#sql-time-limit-ms">sql_time_limit_ms</a>
                configuration option.
            """,
                title="SQL Interrupted",
                status=400,
                message_is_html=True,
            )
        except (sqlite3.OperationalError, InvalidSql) as e:
            raise DatasetteError(str(e), title="Invalid SQL", status=400)

        except DatasetteError:
            raise

        end = time.perf_counter()
        data["query_ms"] = (end - start) * 1000
        for key in ("source", "source_url", "license", "license_url"):
            value = self.ds.metadata(key)
            if value:
                data[key] = value

        # Special case for .jsono extension - redirect to _shape=objects
        if _format == "jsono":
            return self.redirect(
                request,
                path_with_added_args(
                    request,
                    {"_shape": "objects"},
                    path=request.path.rsplit(".jsono", 1)[0] + ".json",
                ),
                forward_querystring=False,
            )

        if _format in self.ds.renderers.keys():
            # Dispatch request to the correct output format renderer
            # (CSV is not handled here due to streaming)
            result = call_with_supported_arguments(
                self.ds.renderers[_format][0],
                datasette=self.ds,
                columns=data.get("columns") or [],
                rows=data.get("rows") or [],
                sql=data.get("query", {}).get("sql", None),
                query_name=data.get("query_name"),
                database=database,
                table=data.get("table"),
                request=request,
                view_name=self.name,
                # These will be deprecated in Datasette 1.0:
                args=request.args,
                data=data,
            )
            if asyncio.iscoroutine(result):
                result = await result
            if result is None:
                raise NotFound("No data")
            if isinstance(result, dict):
                r = Response(
                    body=result.get("body"),
                    status=result.get("status_code", status_code or 200),
                    content_type=result.get("content_type", "text/plain"),
                    headers=result.get("headers"),
                )
            elif isinstance(result, Response):
                r = result
                if status_code is not None:
                    # Over-ride the status code
                    r.status = status_code
            else:
                assert False, f"{result} should be dict or Response"
        else:
            extras = {}
            if callable(extra_template_data):
                extras = extra_template_data()
                if asyncio.iscoroutine(extras):
                    extras = await extras
            else:
                extras = extra_template_data
            url_labels_extra = {}
            if data.get("expandable_columns"):
                url_labels_extra = {"_labels": "on"}

            renderers = {}
            for key, (_, can_render) in self.ds.renderers.items():
                it_can_render = call_with_supported_arguments(
                    can_render,
                    datasette=self.ds,
                    columns=data.get("columns") or [],
                    rows=data.get("rows") or [],
                    sql=data.get("query", {}).get("sql", None),
                    query_name=data.get("query_name"),
                    database=database,
                    table=data.get("table"),
                    request=request,
                    view_name=self.name,
                )
                it_can_render = await await_me_maybe(it_can_render)
                if it_can_render:
                    renderers[key] = self.ds.urls.path(
                        path_with_format(request=request,
                                         format=key,
                                         extra_qs={**url_labels_extra}))

            url_csv_args = {"_size": "max", **url_labels_extra}
            url_csv = self.ds.urls.path(
                path_with_format(request=request,
                                 format="csv",
                                 extra_qs=url_csv_args))
            url_csv_path = url_csv.split("?")[0]
            context = {
                **data,
                **extras,
                **{
                    "renderers": renderers,
                    "url_csv": url_csv,
                    "url_csv_path": url_csv_path,
                    "url_csv_hidden_args": [
                        (key, value)
                        for key, value in urllib.parse.parse_qsl(request.query_string)
                        if key not in ("_labels", "_facet", "_size")
                    ]
                    + [("_size", "max")],
                    "datasette_version": __version__,
                    "settings": self.ds.settings_dict(),
                },
            }
            if "metadata" not in context:
                context["metadata"] = self.ds.metadata
            r = await self.render(templates, request=request, context=context)
            if status_code is not None:
                r.status = status_code

        ttl = request.args.get("_ttl", None)
        if ttl is None or not ttl.isdigit():
            ttl = self.ds.setting("default_cache_ttl")

        return self.set_response_headers(r, ttl)
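
The dispatcher above relies on a simple contract: self.data() returns either a Response (sent back as-is) or a tuple of (data, extra_template_data, templates), optionally followed by a fourth element carrying an HTTP status code. A minimal sketch of a hypothetical view's data() method that satisfies that contract (the names and template are illustrative):

    async def data(self, request, default_labels=False):
        # Dict rendered by the JSON/CSV renderers and merged into the HTML context
        data = {"columns": [], "rows": []}

        async def extra_template():
            # Extra context that is only computed when rendering HTML
            return {"custom_flag": True}

        templates = ("example.html",)
        # Returning a 4-tuple such as (data, extra_template, templates, 201)
        # would also set the response status code.
        return data, extra_template, templates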
Example No. 4
def test_tilde_encoding(original, expected):
    actual = utils.tilde_encode(original)
    assert actual == expected
    # And test round-trip
    assert original == utils.tilde_decode(actual)
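
The parametrized cases for this test live in a decorator not shown here; the property being checked is that tilde encoding round-trips cleanly. A rough standalone sketch of the idea (not Datasette's actual implementation, which lives in datasette.utils and chooses its own safe character set):

import string

SAFE_BYTES = frozenset((string.ascii_letters + string.digits + "_-").encode())

def tilde_encode_sketch(value):
    # Encode UTF-8 bytes outside a safe set as ~XX hex pairs, so values like
    # "foo/bar.csv" can travel inside a single URL path segment.
    return "".join(
        chr(b) if b in SAFE_BYTES else "~{:02x}".format(b)
        for b in value.encode("utf-8")
    )

def tilde_decode_sketch(value):
    # Reverse the encoding: turn ~XX pairs back into bytes, then decode UTF-8.
    out = bytearray()
    i = 0
    while i < len(value):
        if value[i] == "~":
            out.append(int(value[i + 1 : i + 3], 16))
            i += 3
        else:
            out.append(ord(value[i]))
            i += 1
    return out.decode("utf-8")

assert tilde_decode_sketch(tilde_encode_sketch("foo/bar.csv")) == "foo/bar.csv"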
Example No. 5
    async def _data_traced(
        self,
        request,
        default_labels=False,
        _next=None,
        _size=None,
    ):
        database_route = tilde_decode(request.url_vars["database"])
        table_name = tilde_decode(request.url_vars["table"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database_name = db.name

        # For performance profiling purposes, ?_noparallel=1 turns off asyncio.gather
        async def _gather_parallel(*args):
            return await asyncio.gather(*args)

        async def _gather_sequential(*args):
            results = []
            for fn in args:
                results.append(await fn)
            return results

        gather = (_gather_sequential
                  if request.args.get("_noparallel") else _gather_parallel)

        # If this is a canned query, not a table, then dispatch to QueryView instead
        canned_query = await self.ds.get_canned_query(database_name,
                                                      table_name,
                                                      request.actor)
        if canned_query:
            return await QueryView(self.ds).data(
                request,
                canned_query["sql"],
                metadata=canned_query,
                editable=False,
                canned_query=table_name,
                named_parameters=canned_query.get("params"),
                write=bool(canned_query.get("write")),
            )

        is_view, table_exists = map(
            bool,
            await gather(db.get_view_definition(table_name),
                         db.table_exists(table_name)),
        )

        # If table or view not found, return 404
        if not is_view and not table_exists:
            raise NotFound(f"Table not found: {table_name}")

        # Ensure user has permission to view this table
        await self.ds.ensure_permissions(
            request.actor,
            [
                ("view-table", (database_name, table_name)),
                ("view-database", database_name),
                "view-instance",
            ],
        )

        private = not await self.ds.permission_allowed(
            None, "view-table", (database_name, table_name), default=True)

        # Handle ?_filter_column and redirect, if present
        redirect_params = filters_should_redirect(request.args)
        if redirect_params:
            return self.redirect(
                request,
                path_with_added_args(request, redirect_params),
                forward_querystring=False,
            )

        # If ?_sort_by_desc=on (from checkbox) redirect to _sort_desc=(_sort)
        if "_sort_by_desc" in request.args:
            return self.redirect(
                request,
                path_with_added_args(
                    request,
                    {
                        "_sort_desc": request.args.get("_sort"),
                        "_sort_by_desc": None,
                        "_sort": None,
                    },
                ),
                forward_querystring=False,
            )

        # Introspect columns and primary keys for table
        pks = await db.primary_keys(table_name)
        table_columns = await db.table_columns(table_name)

        # Take ?_col= and ?_nocol= into account
        specified_columns = await self.columns_to_select(
            table_columns, pks, request)
        select_specified_columns = ", ".join(
            escape_sqlite(t) for t in specified_columns)
        select_all_columns = ", ".join(escape_sqlite(t) for t in table_columns)

        # rowid tables (no specified primary key) need a different SELECT
        use_rowid = not pks and not is_view
        if use_rowid:
            select_specified_columns = f"rowid, {select_specified_columns}"
            select_all_columns = f"rowid, {select_all_columns}"
            order_by = "rowid"
            order_by_pks = "rowid"
        else:
            order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
            order_by = order_by_pks

        if is_view:
            order_by = ""

        nocount = request.args.get("_nocount")
        nofacet = request.args.get("_nofacet")
        nosuggest = request.args.get("_nosuggest")

        if request.args.get("_shape") in ("array", "object"):
            nocount = True
            nofacet = True

        table_metadata = self.ds.table_metadata(database_name, table_name)
        units = table_metadata.get("units", {})

        # Arguments that start with _ and don't contain a __ are
        # special - things like ?_search= - and should not be
        # treated as filters.
        filter_args = []
        for key in request.args:
            if not (key.startswith("_") and "__" not in key):
                for v in request.args.getlist(key):
                    filter_args.append((key, v))

        # Build where clauses from query string arguments
        filters = Filters(sorted(filter_args), units, ureg)
        where_clauses, params = filters.build_where_clauses(table_name)

        # Execute filters_from_request plugin hooks - including the default
        # ones that live in datasette/filters.py
        extra_context_from_filters = {}
        extra_human_descriptions = []

        for hook in pm.hook.filters_from_request(
                request=request,
                table=table_name,
                database=database_name,
                datasette=self.ds,
        ):
            filter_arguments = await await_me_maybe(hook)
            if filter_arguments:
                where_clauses.extend(filter_arguments.where_clauses)
                params.update(filter_arguments.params)
                extra_human_descriptions.extend(
                    filter_arguments.human_descriptions)
                extra_context_from_filters.update(
                    filter_arguments.extra_context)

        # Deal with custom sort orders
        sortable_columns = await self.sortable_columns_for_table(
            database_name, table_name, use_rowid)
        sort = request.args.get("_sort")
        sort_desc = request.args.get("_sort_desc")

        if not sort and not sort_desc:
            sort = table_metadata.get("sort")
            sort_desc = table_metadata.get("sort_desc")

        if sort and sort_desc:
            raise DatasetteError(
                "Cannot use _sort and _sort_desc at the same time")

        if sort:
            if sort not in sortable_columns:
                raise DatasetteError(f"Cannot sort table by {sort}")

            order_by = escape_sqlite(sort)

        if sort_desc:
            if sort_desc not in sortable_columns:
                raise DatasetteError(f"Cannot sort table by {sort_desc}")

            order_by = f"{escape_sqlite(sort_desc)} desc"

        from_sql = "from {table_name} {where}".format(
            table_name=escape_sqlite(table_name),
            where=("where {} ".format(" and ".join(where_clauses)))
            if where_clauses else "",
        )
        # Copy of params so we can mutate them later:
        from_sql_params = dict(**params)

        count_sql = f"select count(*) {from_sql}"

        # Handle pagination driven by ?_next=
        _next = _next or request.args.get("_next")
        offset = ""
        if _next:
            sort_value = None
            if is_view:
                # _next is an offset
                offset = f" offset {int(_next)}"
            else:
                components = urlsafe_components(_next)
                # If a sort order is applied and there are multiple components,
                # the first of these is the sort value
                if (sort or sort_desc) and (len(components) > 1):
                    sort_value = components[0]
                    # Special case for if non-urlencoded first token was $null
                    if _next.split(",")[0] == "$null":
                        sort_value = None
                    components = components[1:]

                # Figure out the SQL for next-based-on-primary-key first
                next_by_pk_clauses = []
                if use_rowid:
                    next_by_pk_clauses.append(f"rowid > :p{len(params)}")
                    params[f"p{len(params)}"] = components[0]
                else:
                    # Apply the tie-breaker based on primary keys
                    if len(components) == len(pks):
                        param_len = len(params)
                        next_by_pk_clauses.append(
                            compound_keys_after_sql(pks, param_len))
                        for i, pk_value in enumerate(components):
                            params[f"p{param_len + i}"] = pk_value

                # Now add the sort SQL, which may incorporate next_by_pk_clauses
                if sort or sort_desc:
                    if sort_value is None:
                        if sort_desc:
                            # Just items where column is null ordered by pk
                            where_clauses.append(
                                "({column} is null and {next_clauses})".format(
                                    column=escape_sqlite(sort_desc),
                                    next_clauses=" and ".join(
                                        next_by_pk_clauses),
                                ))
                        else:
                            where_clauses.append(
                                "({column} is not null or ({column} is null and {next_clauses}))"
                                .format(
                                    column=escape_sqlite(sort),
                                    next_clauses=" and ".join(
                                        next_by_pk_clauses),
                                ))
                    else:
                        where_clauses.append(
                            "({column} {op} :p{p}{extra_desc_only} or ({column} = :p{p} and {next_clauses}))"
                            .format(
                                column=escape_sqlite(sort or sort_desc),
                                op=">" if sort else "<",
                                p=len(params),
                                extra_desc_only=""
                                if sort else " or {column2} is null".format(
                                    column2=escape_sqlite(sort or sort_desc)),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            ))
                        params[f"p{len(params)}"] = sort_value
                    order_by = f"{order_by}, {order_by_pks}"
                else:
                    where_clauses.extend(next_by_pk_clauses)

        where_clause = ""
        if where_clauses:
            where_clause = f"where {' and '.join(where_clauses)} "

        if order_by:
            order_by = f"order by {order_by}"

        extra_args = {}
        # Handle ?_size=500
        page_size = _size or request.args.get("_size") or table_metadata.get(
            "size")
        if page_size:
            if page_size == "max":
                page_size = self.ds.max_returned_rows
            try:
                page_size = int(page_size)
                if page_size < 0:
                    raise ValueError

            except ValueError:
                raise BadRequest("_size must be a positive integer")

            if page_size > self.ds.max_returned_rows:
                raise BadRequest(
                    f"_size must be <= {self.ds.max_returned_rows}")

            extra_args["page_size"] = page_size
        else:
            page_size = self.ds.page_size

        # Facets are calculated against SQL without order by or limit
        sql_no_order_no_limit = (
            "select {select_all_columns} from {table_name} {where}".format(
                select_all_columns=select_all_columns,
                table_name=escape_sqlite(table_name),
                where=where_clause,
            ))

        # This is the SQL that populates the main table on the page
        sql = "select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}".format(
            select_specified_columns=select_specified_columns,
            table_name=escape_sqlite(table_name),
            where=where_clause,
            order_by=order_by,
            page_size=page_size + 1,
            offset=offset,
        )

        if request.args.get("_timelimit"):
            extra_args["custom_time_limit"] = int(
                request.args.get("_timelimit"))

        # Execute the main query!
        results = await db.execute(sql, params, truncate=True, **extra_args)

        # Calculate the total count for this query
        filtered_table_rows_count = None
        if (not db.is_mutable and self.ds.inspect_data
                and count_sql == f"select count(*) from {table_name} "):
            # We can use a previously cached table row count
            try:
                filtered_table_rows_count = self.ds.inspect_data[
                    database_name]["tables"][table_name]["count"]
            except KeyError:
                pass

        # Otherwise run a select count(*) ...
        if count_sql and filtered_table_rows_count is None and not nocount:
            try:
                count_rows = list(await db.execute(count_sql, from_sql_params))
                filtered_table_rows_count = count_rows[0][0]
            except QueryInterrupted:
                pass

        # Faceting
        if not self.ds.setting("allow_facet") and any(
                arg.startswith("_facet") for arg in request.args):
            raise BadRequest("_facet= is not allowed")

        # pylint: disable=no-member
        facet_classes = list(
            itertools.chain.from_iterable(pm.hook.register_facet_classes()))
        facet_results = {}
        facets_timed_out = []
        facet_instances = []
        for klass in facet_classes:
            facet_instances.append(
                klass(
                    self.ds,
                    request,
                    database_name,
                    sql=sql_no_order_no_limit,
                    params=params,
                    table=table_name,
                    metadata=table_metadata,
                    row_count=filtered_table_rows_count,
                ))

        async def execute_facets():
            if not nofacet:
                # Run them in parallel
                facet_awaitables = [
                    facet.facet_results() for facet in facet_instances
                ]
                facet_awaitable_results = await gather(*facet_awaitables)
                for (
                        instance_facet_results,
                        instance_facets_timed_out,
                ) in facet_awaitable_results:
                    for facet_info in instance_facet_results:
                        base_key = facet_info["name"]
                        key = base_key
                        i = 1
                        while key in facet_results:
                            i += 1
                            key = f"{base_key}_{i}"
                        facet_results[key] = facet_info
                    facets_timed_out.extend(instance_facets_timed_out)

        suggested_facets = []

        async def execute_suggested_facets():
            # Calculate suggested facets
            if (self.ds.setting("suggest_facets")
                    and self.ds.setting("allow_facet") and not _next
                    and not nofacet and not nosuggest):
                # Run them in parallel
                facet_suggest_awaitables = [
                    facet.suggest() for facet in facet_instances
                ]
                for suggest_result in await gather(*facet_suggest_awaitables):
                    suggested_facets.extend(suggest_result)

        await gather(execute_facets(), execute_suggested_facets())

        # Figure out columns and rows for the query
        columns = [r[0] for r in results.description]
        rows = list(results.rows)

        # Expand labeled columns if requested
        expanded_columns = []
        expandable_columns = await self.expandable_columns(
            database_name, table_name)
        columns_to_expand = None
        try:
            all_labels = value_as_boolean(request.args.get("_labels", ""))
        except ValueError:
            all_labels = default_labels
        # Check for explicit _label=
        if "_label" in request.args:
            columns_to_expand = request.args.getlist("_label")
        if columns_to_expand is None and all_labels:
            # expand all columns with foreign keys
            columns_to_expand = [fk["column"] for fk, _ in expandable_columns]

        if columns_to_expand:
            expanded_labels = {}
            for fk, _ in expandable_columns:
                column = fk["column"]
                if column not in columns_to_expand:
                    continue
                if column not in columns:
                    continue
                expanded_columns.append(column)
                # Gather the values
                column_index = columns.index(column)
                values = [row[column_index] for row in rows]
                # Expand them
                expanded_labels.update(await self.ds.expand_foreign_keys(
                    database_name, table_name, column, values))
            if expanded_labels:
                # Rewrite the rows
                new_rows = []
                for row in rows:
                    new_row = CustomRow(columns)
                    for column in row.keys():
                        value = row[column]
                        if (column, value
                            ) in expanded_labels and value is not None:
                            new_row[column] = {
                                "value": value,
                                "label": expanded_labels[(column, value)],
                            }
                        else:
                            new_row[column] = value
                    new_rows.append(new_row)
                rows = new_rows

        # Pagination next link
        next_value = None
        next_url = None
        if 0 < page_size < len(rows):
            if is_view:
                next_value = int(_next or 0) + page_size
            else:
                next_value = path_from_row_pks(rows[-2], pks, use_rowid)
            # If there's a sort or sort_desc, add that value as a prefix
            if (sort or sort_desc) and not is_view:
                prefix = rows[-2][sort or sort_desc]
                if isinstance(prefix, dict) and "value" in prefix:
                    prefix = prefix["value"]
                if prefix is None:
                    prefix = "$null"
                else:
                    prefix = tilde_encode(str(prefix))
                next_value = f"{prefix},{next_value}"
                added_args = {"_next": next_value}
                if sort:
                    added_args["_sort"] = sort
                else:
                    added_args["_sort_desc"] = sort_desc
            else:
                added_args = {"_next": next_value}
            next_url = self.ds.absolute_url(
                request,
                self.ds.urls.path(path_with_replaced_args(request,
                                                          added_args)))
            rows = rows[:page_size]

        # human_description_en combines filters AND search, if provided
        human_description_en = filters.human_description_en(
            extra=extra_human_descriptions)

        if sort or sort_desc:
            sorted_by = "sorted by {}{}".format(
                (sort or sort_desc), " descending" if sort_desc else "")
            human_description_en = " ".join(
                [b for b in [human_description_en, sorted_by] if b])

        async def extra_template():
            nonlocal sort

            display_columns, display_rows = await display_columns_and_rows(
                self.ds,
                database_name,
                table_name,
                results.description,
                rows,
                link_column=not is_view,
                truncate_cells=self.ds.setting("truncate_cells_html"),
                sortable_columns=await self.sortable_columns_for_table(
                    database_name, table_name, use_rowid=True
                ),
            )
            metadata = ((self.ds.metadata("databases")
                         or {}).get(database_name,
                                    {}).get("tables", {}).get(table_name, {}))
            self.ds.update_with_inherited_metadata(metadata)

            form_hidden_args = []
            for key in request.args:
                if (key.startswith("_")
                        and key not in ("_sort", "_search", "_next")
                        and "__" not in key):
                    for value in request.args.getlist(key):
                        form_hidden_args.append((key, value))

            # if no sort specified AND table has a single primary key,
            # set sort to that so arrow is displayed
            if not sort and not sort_desc:
                if 1 == len(pks):
                    sort = pks[0]
                elif use_rowid:
                    sort = "rowid"

            async def table_actions():
                links = []
                for hook in pm.hook.table_actions(
                        datasette=self.ds,
                        table=table_name,
                        database=database_name,
                        actor=request.actor,
                        request=request,
                ):
                    extra_links = await await_me_maybe(hook)
                    if extra_links:
                        links.extend(extra_links)
                return links

            # filter_columns combine the columns we know are available
            # in the table with any additional columns (such as rowid)
            # which are available in the query
            filter_columns = list(columns) + [
                table_column for table_column in table_columns
                if table_column not in columns
            ]
            d = {
                "table_actions": table_actions,
                "use_rowid": use_rowid,
                "filters": filters,
                "display_columns": display_columns,
                "filter_columns": filter_columns,
                "display_rows": display_rows,
                "facets_timed_out": facets_timed_out,
                "sorted_facet_results": sorted(
                    facet_results.values(),
                    key=lambda f: (len(f["results"]), f["name"]),
                    reverse=True,
                ),
                "form_hidden_args": form_hidden_args,
                "is_sortable": any(c["sortable"] for c in display_columns),
                "fix_path": self.ds.urls.path,
                "path_with_replaced_args": path_with_replaced_args,
                "path_with_removed_args": path_with_removed_args,
                "append_querystring": append_querystring,
                "request": request,
                "sort": sort,
                "sort_desc": sort_desc,
                "disable_sort": is_view,
                "custom_table_templates": [
                    f"_table-{to_css_class(database_name)}-{to_css_class(table_name)}.html",
                    f"_table-table-{to_css_class(database_name)}-{to_css_class(table_name)}.html",
                    "_table.html",
                ],
                "metadata": metadata,
                "view_definition": await db.get_view_definition(table_name),
                "table_definition": await db.get_table_definition(table_name),
                "datasette_allow_facet": "true" if self.ds.setting("allow_facet") else "false",
            }
            d.update(extra_context_from_filters)
            return d

        return (
            {
                "database": database_name,
                "table": table_name,
                "is_view": is_view,
                "human_description_en": human_description_en,
                "rows": rows[:page_size],
                "truncated": results.truncated,
                "filtered_table_rows_count": filtered_table_rows_count,
                "expanded_columns": expanded_columns,
                "expandable_columns": expandable_columns,
                "columns": columns,
                "primary_keys": pks,
                "units": units,
                "query": {"sql": sql, "params": params},
                "facet_results": facet_results,
                "suggested_facets": suggested_facets,
                "next": next_value and str(next_value) or None,
                "next_url": next_url,
                "private": private,
                "allow_execute_sql": await self.ds.permission_allowed(
                    request.actor, "execute-sql", database_name, default=True
                ),
            },
            extra_template,
            (
                f"table-{to_css_class(database_name)}-{to_css_class(table_name)}.html",
                "table.html",
            ),
        )
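
Pagination in this view is keyset-based: the _next token carries the previous page's sort value plus its primary key values, and compound_keys_after_sql() turns those primary keys into an "after this row" WHERE fragment. A rough sketch of that fragment for a compound key, assuming plain [column] > :pN comparisons (the real helper in datasette.utils handles identifier escaping and parameter numbering itself):

def compound_keys_after_sql_sketch(pks, start_param=0):
    # Rows sorting after a given (pk1, pk2, ...) tuple match:
    # ([pk1] > :p0) or ([pk1] = :p0 and [pk2] > :p1) or ...
    or_clauses = []
    for i, pk in enumerate(pks):
        equals = " and ".join(
            "[{}] = :p{}".format(prev, start_param + j)
            for j, prev in enumerate(pks[:i])
        )
        greater = "[{}] > :p{}".format(pk, start_param + i)
        parts = [p for p in (equals, greater) if p]
        or_clauses.append("({})".format(" and ".join(parts)))
    return "({})".format(" or ".join(or_clauses))

# Usage sketch for a two-column primary key:
print(compound_keys_after_sql_sketch(["year", "id"]))
# (([year] > :p0) or ([year] = :p0 and [id] > :p1))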
Example No. 6
    async def data(self, request, default_labels=False, _size=None):
        database_route = tilde_decode(request.url_vars["database"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database = db.name

        await self.ds.ensure_permissions(
            request.actor,
            [
                ("view-database", database),
                "view-instance",
            ],
        )
        metadata = (self.ds.metadata("databases") or {}).get(database, {})
        self.ds.update_with_inherited_metadata(metadata)

        if request.args.get("sql"):
            sql = request.args.get("sql")
            validate_sql_select(sql)
            return await QueryView(self.ds).data(request,
                                                 sql,
                                                 _size=_size,
                                                 metadata=metadata)

        table_counts = await db.table_counts(5)
        hidden_table_names = set(await db.hidden_table_names())
        all_foreign_keys = await db.get_all_foreign_keys()

        views = []
        for view_name in await db.view_names():
            visible, private = await self.ds.check_visibility(
                request.actor,
                "view-table",
                (database, view_name),
            )
            if visible:
                views.append({
                    "name": view_name,
                    "private": private,
                })

        tables = []
        for table in table_counts:
            visible, private = await self.ds.check_visibility(
                request.actor,
                "view-table",
                (database, table),
            )
            if not visible:
                continue
            table_columns = await db.table_columns(table)
            tables.append({
                "name": table,
                "columns": table_columns,
                "primary_keys": await db.primary_keys(table),
                "count": table_counts[table],
                "hidden": table in hidden_table_names,
                "fts_table": await db.fts_table(table),
                "foreign_keys": all_foreign_keys[table],
                "private": private,
            })

        tables.sort(key=lambda t: (t["hidden"], t["name"]))
        canned_queries = []
        for query in (
            await self.ds.get_canned_queries(database, request.actor)
        ).values():
            visible, private = await self.ds.check_visibility(
                request.actor,
                "view-query",
                (database, query["name"]),
            )
            if visible:
                canned_queries.append(dict(query, private=private))

        async def database_actions():
            links = []
            for hook in pm.hook.database_actions(
                    datasette=self.ds,
                    database=database,
                    actor=request.actor,
                    request=request,
            ):
                extra_links = await await_me_maybe(hook)
                if extra_links:
                    links.extend(extra_links)
            return links

        attached_databases = [d.name for d in await db.attached_databases()]

        return (
            {
                "database": database,
                "path": self.ds.urls.database(database),
                "size": db.size,
                "tables": tables,
                "hidden_count": len([t for t in tables if t["hidden"]]),
                "views": views,
                "queries": canned_queries,
                "private": not await self.ds.permission_allowed(
                    None, "view-database", database, default=True
                ),
                "allow_execute_sql": await self.ds.permission_allowed(
                    request.actor, "execute-sql", database, default=True
                ),
            },
            {
                "database_actions": database_actions,
                "show_hidden": request.args.get("_show_hidden"),
                "editable": True,
                "metadata": metadata,
                "allow_download": (
                    self.ds.setting("allow_download")
                    and not db.is_mutable
                    and not db.is_memory
                ),
                "attached_databases": attached_databases,
            },
            (f"database-{to_css_class(database)}.html", "database.html"),
        )
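
check_visibility() returns a (visible, private) pair: visible says whether the current actor may see the table or view, and private flags resources an anonymous visitor could not see. Whether a table ends up private is usually controlled by an allow block in metadata; a minimal sketch with illustrative database and table names:

# Illustrative metadata sketch: only the "root" actor may view the users table,
# so for root check_visibility() yields (True, True) and the table is listed
# with private=True, while anonymous requests do not see it at all.
metadata = {
    "databases": {
        "mydb": {
            "tables": {
                "users": {"allow": {"id": "root"}},
            }
        }
    }
}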
Example No. 7
    async def data(
        self,
        request,
        sql,
        editable=True,
        canned_query=None,
        metadata=None,
        _size=None,
        named_parameters=None,
        write=False,
    ):
        database_route = tilde_decode(request.url_vars["database"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database = db.name
        params = {key: request.args.get(key) for key in request.args}
        if "sql" in params:
            params.pop("sql")
        if "_shape" in params:
            params.pop("_shape")

        private = False
        if canned_query:
            # Respect canned query permissions
            await self.ds.ensure_permissions(
                request.actor,
                [
                    ("view-query", (database, canned_query)),
                    ("view-database", database),
                    "view-instance",
                ],
            )
            private = not await self.ds.permission_allowed(
                None, "view-query", (database, canned_query), default=True)
        else:
            await self.ds.ensure_permissions(request.actor,
                                             [("execute-sql", database)])

        # Extract any :named parameters
        named_parameters = named_parameters or await derive_named_parameters(
            self.ds.get_database(database), sql)
        named_parameter_values = {
            named_parameter: params.get(named_parameter) or ""
            for named_parameter in named_parameters
            if not named_parameter.startswith("_")
        }

        # Set to blank string if missing from params
        for named_parameter in named_parameters:
            if named_parameter not in params and not named_parameter.startswith(
                    "_"):
                params[named_parameter] = ""

        extra_args = {}
        if params.get("_timelimit"):
            extra_args["custom_time_limit"] = int(params["_timelimit"])
        if _size:
            extra_args["page_size"] = _size

        templates = [f"query-{to_css_class(database)}.html", "query.html"]
        if canned_query:
            templates.insert(
                0,
                f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
            )

        query_error = None

        # Execute query - as write or as read
        if write:
            if request.method == "POST":
                body = await request.post_body()
                body = body.decode("utf-8").strip()
                if body.startswith("{") and body.endswith("}"):
                    params = json.loads(body)
                    # But we want key=value strings
                    for key, value in params.items():
                        params[key] = str(value)
                else:
                    params = dict(parse_qsl(body, keep_blank_values=True))
                # Should we return JSON?
                should_return_json = (request.headers.get("accept")
                                      == "application/json"
                                      or request.args.get("_json")
                                      or params.get("_json"))
                if canned_query:
                    params_for_query = MagicParameters(params, request,
                                                       self.ds)
                else:
                    params_for_query = params
                ok = None
                try:
                    cursor = await self.ds.databases[database].execute_write(
                        sql, params_for_query)
                    message = metadata.get(
                        "on_success_message"
                    ) or "Query executed, {} row{} affected".format(
                        cursor.rowcount, "" if cursor.rowcount == 1 else "s")
                    message_type = self.ds.INFO
                    redirect_url = metadata.get("on_success_redirect")
                    ok = True
                except Exception as e:
                    message = metadata.get("on_error_message") or str(e)
                    message_type = self.ds.ERROR
                    redirect_url = metadata.get("on_error_redirect")
                    ok = False
                if should_return_json:
                    return Response.json({
                        "ok": ok,
                        "message": message,
                        "redirect": redirect_url,
                    })
                else:
                    self.ds.add_message(request, message, message_type)
                    return self.redirect(request, redirect_url or request.path)
            else:

                async def extra_template():
                    return {
                        "request": request,
                        "path_with_added_args": path_with_added_args,
                        "path_with_removed_args": path_with_removed_args,
                        "named_parameter_values": named_parameter_values,
                        "canned_query": canned_query,
                        "success_message": request.args.get("_success") or "",
                        "canned_write": True,
                    }

                return (
                    {
                        "database": database,
                        "rows": [],
                        "truncated": False,
                        "columns": [],
                        "query": {
                            "sql": sql,
                            "params": params
                        },
                        "private": private,
                    },
                    extra_template,
                    templates,
                )
        else:  # Not a write
            if canned_query:
                params_for_query = MagicParameters(params, request, self.ds)
            else:
                params_for_query = params
            try:
                results = await self.ds.execute(database,
                                                sql,
                                                params_for_query,
                                                truncate=True,
                                                **extra_args)
                columns = [r[0] for r in results.description]
            except sqlite3.DatabaseError as e:
                query_error = e
                results = None
                columns = []

        allow_execute_sql = await self.ds.permission_allowed(request.actor,
                                                             "execute-sql",
                                                             database,
                                                             default=True)

        async def extra_template():
            display_rows = []
            for row in results.rows if results else []:
                display_row = []
                for column, value in zip(results.columns, row):
                    display_value = value
                    # Let the plugins have a go
                    # pylint: disable=no-member
                    plugin_display_value = None
                    for candidate in pm.hook.render_cell(
                            row=row,
                            value=value,
                            column=column,
                            table=None,
                            database=database,
                            datasette=self.ds,
                    ):
                        candidate = await await_me_maybe(candidate)
                        if candidate is not None:
                            plugin_display_value = candidate
                            break
                    if plugin_display_value is not None:
                        display_value = plugin_display_value
                    else:
                        if value in ("", None):
                            display_value = Markup("&nbsp;")
                        elif is_url(str(display_value).strip()):
                            display_value = Markup(
                                '<a href="{url}">{url}</a>'.format(
                                    url=escape(value.strip())))
                        elif isinstance(display_value, bytes):
                            blob_url = path_with_format(
                                request=request,
                                format="blob",
                                extra_qs={
                                    "_blob_column": column,
                                    "_blob_hash": hashlib.sha256(display_value).hexdigest(),
                                },
                            )
                            formatted = format_bytes(len(value))
                            display_value = markupsafe.Markup(
                                '<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'
                                .format(
                                    blob_url,
                                    ' title="{}"'.format(formatted)
                                    if "bytes" not in formatted else "",
                                    len(value),
                                    "" if len(value) == 1 else "s",
                                ))

                    display_row.append(display_value)
                display_rows.append(display_row)

            # Show 'Edit SQL' button only if:
            # - User is allowed to execute SQL
            # - SQL is an approved SELECT statement
            # - No magic parameters, so no :_ in the SQL string
            edit_sql_url = None
            is_validated_sql = False
            try:
                validate_sql_select(sql)
                is_validated_sql = True
            except InvalidSql:
                pass
            if allow_execute_sql and is_validated_sql and ":_" not in sql:
                edit_sql_url = (
                    self.ds.urls.database(database)
                    + "?"
                    + urlencode({"sql": sql, **named_parameter_values})
                )

            show_hide_hidden = ""
            if metadata.get("hide_sql"):
                if bool(params.get("_show_sql")):
                    show_hide_link = path_with_removed_args(
                        request, {"_show_sql"})
                    show_hide_text = "hide"
                    show_hide_hidden = (
                        '<input type="hidden" name="_show_sql" value="1">')
                else:
                    show_hide_link = path_with_added_args(
                        request, {"_show_sql": 1})
                    show_hide_text = "show"
            else:
                if bool(params.get("_hide_sql")):
                    show_hide_link = path_with_removed_args(
                        request, {"_hide_sql"})
                    show_hide_text = "show"
                    show_hide_hidden = (
                        '<input type="hidden" name="_hide_sql" value="1">')
                else:
                    show_hide_link = path_with_added_args(
                        request, {"_hide_sql": 1})
                    show_hide_text = "hide"
            hide_sql = show_hide_text == "show"
            return {
                "display_rows": display_rows,
                "custom_sql": True,
                "named_parameter_values": named_parameter_values,
                "editable": editable,
                "canned_query": canned_query,
                "edit_sql_url": edit_sql_url,
                "metadata": metadata,
                "settings": self.ds.settings_dict(),
                "request": request,
                "show_hide_link": self.ds.urls.path(show_hide_link),
                "show_hide_text": show_hide_text,
                "show_hide_hidden": markupsafe.Markup(show_hide_hidden),
                "hide_sql": hide_sql,
            }

        return (
            {
                "ok": not query_error,
                "database": database,
                "query_name": canned_query,
                "rows": results.rows if results else [],
                "truncated": results.truncated if results else False,
                "columns": columns,
                "query": {
                    "sql": sql,
                    "params": params
                },
                "error": str(query_error) if query_error else None,
                "private": private,
                "allow_execute_sql": allow_execute_sql,
            },
            extra_template,
            templates,
            400 if query_error else 200,
        )
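
derive_named_parameters() inspects the SQL to discover :named parameters so the query page can render an input box for each one. A much simpler regex-based sketch of the same idea (the real helper in datasette.utils is more robust; this approximation ignores string literals and other edge cases):

import re

def derive_named_parameters_sketch(sql):
    # Collect ":name" style parameters; "::" casts and quoted strings are
    # deliberately not handled by this rough sketch.
    return re.findall(r"(?<!:):([a-zA-Z_][a-zA-Z0-9_]*)", sql)

print(derive_named_parameters_sketch(
    "select * from names where name = :name and age > :min_age"
))
# ['name', 'min_age']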
Example No. 8
    async def data(self, request, default_labels=False):
        database_route = tilde_decode(request.url_vars["database"])
        table = tilde_decode(request.url_vars["table"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database = db.name
        await self.ds.ensure_permissions(
            request.actor,
            [
                ("view-table", (database, table)),
                ("view-database", database),
                "view-instance",
            ],
        )
        pk_values = urlsafe_components(request.url_vars["pks"])
        sql, params, pks = await _sql_params_pks(db, table, pk_values)
        results = await db.execute(sql, params, truncate=True)
        columns = [r[0] for r in results.description]
        rows = list(results.rows)
        if not rows:
            raise NotFound(f"Record not found: {pk_values}")

        async def template_data():
            display_columns, display_rows = await display_columns_and_rows(
                self.ds,
                database,
                table,
                results.description,
                rows,
                link_column=False,
                truncate_cells=0,
            )
            for column in display_columns:
                column["sortable"] = False
            return {
                "foreign_key_tables": await self.foreign_key_tables(
                    database, table, pk_values
                ),
                "display_columns": display_columns,
                "display_rows": display_rows,
                "custom_table_templates": [
                    f"_table-{to_css_class(database)}-{to_css_class(table)}.html",
                    f"_table-row-{to_css_class(database)}-{to_css_class(table)}.html",
                    "_table.html",
                ],
                "metadata": (self.ds.metadata("databases") or {})
                .get(database, {})
                .get("tables", {})
                .get(table, {}),
            }

        data = {
            "database": database,
            "table": table,
            "rows": rows,
            "columns": columns,
            "primary_keys": pks,
            "primary_key_values": pk_values,
            "units": self.ds.table_metadata(database, table).get("units", {}),
        }

        if "foreign_key_tables" in (request.args.get("_extras")
                                    or "").split(","):
            data["foreign_key_tables"] = await self.foreign_key_tables(
                database, table, pk_values)

        return (
            data,
            template_data,
            (
                f"row-{to_css_class(database)}-{to_css_class(table)}.html",
                "row.html",
            ),
        )
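
The "pks" url_var above packs one or more primary key values into a single path segment, and urlsafe_components() splits them back apart. A minimal sketch of that round trip for a compound key, using illustrative helpers (Datasette's own path_from_row_pks() additionally tilde-encodes each value so separators inside values stay unambiguous):

def path_from_pk_values_sketch(values):
    # Join compound primary key values with commas into one path segment,
    # e.g. ("2023", "42") -> "2023,42".
    return ",".join(str(v) for v in values)

def urlsafe_components_sketch(pk_path):
    # Reverse of the above: split the "pks" url_var back into its values.
    return pk_path.split(",")

assert urlsafe_components_sketch(path_from_pk_values_sketch(["2023", "42"])) == [
    "2023",
    "42",
]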