Example #1
async def reconcile(request, datasette):
    database = request.url_vars["db_name"]
    table = request.url_vars["db_table"]
    db = datasette.get_database(database)

    # get plugin configuration
    config = datasette.plugin_config(
        "datasette-reconcile", database=database, table=table
    )
    config = await check_config(config, db, table)

    # check user can at least view this table
    await check_permissions(
        request,
        [
            ("view-table", (database, table)),
            ("view-database", database),
            "view-instance",
        ],
        datasette,
    )

    # work out if we are looking for queries
    post_vars = await request.post_vars()
    queries = post_vars.get("queries", request.args.get("queries"))
    if queries:
        queries = json.loads(queries)
        return Response.json(
            {
                q[0]: {"result": q[1]}
                async for q in reconcile_queries(queries, config, db, table)
            },
            headers={
                "Access-Control-Allow-Origin": "*",
            },
        )

    # if we're not then just return the service specification
    return Response.json(
        service_manifest(config, database, table, datasette, request),
        headers={
            "Access-Control-Allow-Origin": "*",
        },
    )
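The reconcile view above is not routed by itself; in a Datasette plugin it would typically be wired up through the register_routes() plugin hook. A minimal sketch, assuming the db_name and db_table URL groups that the function reads from request.url_vars (the exact path is an assumption):

from datasette import hookimpl


@hookimpl
def register_routes():
    # The named regex groups become request.url_vars in the view
    return [
        (r"^/(?P<db_name>[^/]+)/(?P<db_table>[^/]+)/-/reconcile$", reconcile),
    ]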
Example #2
async def parallel_queries(datasette):
    db = datasette.get_database()
    with tracer.trace_child_tasks():
        one, two = await asyncio.gather(
            db.execute("select coalesce(sleep(0.1), 1)"),
            db.execute("select coalesce(sleep(0.1), 2)"),
        )
    return Response.json({
        "one": one.single_value(),
        "two": two.single_value()
    })
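Note that sleep() is not a built-in SQLite function, so the example above assumes it has been registered on each connection, for instance via the prepare_connection() hook. A sketch of such a registration (a demo assumption, not necessarily the fixture's actual helper):

import time

from datasette import hookimpl


@hookimpl
def prepare_connection(conn):
    # sleep(seconds) returns None, so coalesce(sleep(0.1), 1)
    # evaluates to 1 once the delay has elapsed
    conn.create_function("sleep", 1, lambda n: time.sleep(float(n)))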
Example #3
async def csv_importer_status(scope, receive, datasette, request):
    if not await datasette.permission_allowed(
        request.actor, "csv-importer", default=False
    ):
        raise Forbidden("Permission denied for csv-importer")

    plugin_config = datasette.plugin_config(
        "datasette-csv-importer"
    ) or {}

    db = get_status_database(datasette, plugin_config)
    status_table = get_status_table(plugin_config)

    query = f"select * from {status_table} where id = ? limit 1"
    result = await db.execute(query, (request.url_vars["task_id"],))
    return Response.json(dict(result.first()))
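status_table is interpolated directly into the SQL string here, which is only safe because it comes from plugin configuration rather than user input. If that ever changed, the name could be quoted with datasette.utils.escape_sqlite, as in this hedged variant:

from datasette.utils import escape_sqlite

# Quote the table name instead of trusting it verbatim
query = f"select * from {escape_sqlite(status_table)} where id = ? limit 1"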
Example #4
    async def data(
        self,
        request,
        database,
        hash,
        sql,
        editable=True,
        canned_query=None,
        metadata=None,
        _size=None,
        named_parameters=None,
        write=False,
    ):
        params = {key: request.args.get(key) for key in request.args}
        if "sql" in params:
            params.pop("sql")
        if "_shape" in params:
            params.pop("_shape")

        private = False
        if canned_query:
            # Respect canned query permissions
            await self.check_permissions(
                request,
                [
                    ("view-query", (database, canned_query)),
                    ("view-database", database),
                    "view-instance",
                ],
            )
            private = not await self.ds.permission_allowed(
                None, "view-query", (database, canned_query), default=True)
        else:
            await self.check_permission(request, "execute-sql", database)

        # Extract any :named parameters
        named_parameters = named_parameters or self.re_named_parameter.findall(
            sql)
        named_parameter_values = {
            named_parameter: params.get(named_parameter) or ""
            for named_parameter in named_parameters
            if not named_parameter.startswith("_")
        }

        # Set to blank string if missing from params
        for named_parameter in named_parameters:
            if named_parameter not in params and not named_parameter.startswith(
                    "_"):
                params[named_parameter] = ""

        extra_args = {}
        if params.get("_timelimit"):
            extra_args["custom_time_limit"] = int(params["_timelimit"])
        if _size:
            extra_args["page_size"] = _size

        templates = [f"query-{to_css_class(database)}.html", "query.html"]

        # Execute query - as write or as read
        if write:
            if request.method == "POST":
                body = await request.post_body()
                body = body.decode("utf-8").strip()
                if body.startswith("{") and body.endswith("}"):
                    params = json.loads(body)
                    # But we want key=value strings
                    for key, value in params.items():
                        params[key] = str(value)
                else:
                    params = dict(parse_qsl(body, keep_blank_values=True))
                # Should we return JSON?
                should_return_json = (request.headers.get("accept")
                                      == "application/json"
                                      or request.args.get("_json")
                                      or params.get("_json"))
                if canned_query:
                    params_for_query = MagicParameters(params, request,
                                                       self.ds)
                else:
                    params_for_query = params
                ok = None
                try:
                    cursor = await self.ds.databases[database].execute_write(
                        sql, params_for_query, block=True)
                    message = metadata.get(
                        "on_success_message"
                    ) or "Query executed, {} row{} affected".format(
                        cursor.rowcount, "" if cursor.rowcount == 1 else "s")
                    message_type = self.ds.INFO
                    redirect_url = metadata.get("on_success_redirect")
                    ok = True
                except Exception as e:
                    message = metadata.get("on_error_message") or str(e)
                    message_type = self.ds.ERROR
                    redirect_url = metadata.get("on_error_redirect")
                    ok = False
                if should_return_json:
                    return Response.json({
                        "ok": ok,
                        "message": message,
                        "redirect": redirect_url,
                    })
                else:
                    self.ds.add_message(request, message, message_type)
                    return self.redirect(request, redirect_url or request.path)
            else:

                async def extra_template():
                    return {
                        "request": request,
                        "path_with_added_args": path_with_added_args,
                        "path_with_removed_args": path_with_removed_args,
                        "named_parameter_values": named_parameter_values,
                        "canned_query": canned_query,
                        "success_message": request.args.get("_success") or "",
                        "canned_write": True,
                    }

                return (
                    {
                        "database": database,
                        "rows": [],
                        "truncated": False,
                        "columns": [],
                        "query": {
                            "sql": sql,
                            "params": params
                        },
                        "private": private,
                    },
                    extra_template,
                    templates,
                )
        else:  # Not a write
            if canned_query:
                params_for_query = MagicParameters(params, request, self.ds)
            else:
                params_for_query = params
            results = await self.ds.execute(database,
                                            sql,
                                            params_for_query,
                                            truncate=True,
                                            **extra_args)
            columns = [r[0] for r in results.description]

        if canned_query:
            templates.insert(
                0,
                f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
            )

        allow_execute_sql = await self.ds.permission_allowed(request.actor,
                                                             "execute-sql",
                                                             database,
                                                             default=True)

        async def extra_template():
            display_rows = []
            for row in results.rows:
                display_row = []
                for column, value in zip(results.columns, row):
                    display_value = value
                    # Let the plugins have a go
                    # pylint: disable=no-member
                    plugin_value = pm.hook.render_cell(
                        value=value,
                        column=column,
                        table=None,
                        database=database,
                        datasette=self.ds,
                    )
                    if plugin_value is not None:
                        display_value = plugin_value
                    else:
                        if value in ("", None):
                            display_value = jinja2.Markup(" ")
                        elif is_url(str(display_value).strip()):
                            display_value = jinja2.Markup(
                                '<a href="{url}">{url}</a>'.format(
                                    url=jinja2.escape(value.strip())))
                        elif isinstance(display_value, bytes):
                            blob_url = path_with_format(
                                request=request,
                                format="blob",
                                extra_qs={
                                    "_blob_column": column,
                                    "_blob_hash": hashlib.sha256(
                                        display_value
                                    ).hexdigest(),
                                },
                            )
                            display_value = jinja2.Markup(
                                '<a class="blob-download" href="{}">&lt;Binary:&nbsp;{}&nbsp;byte{}&gt;</a>'
                                .format(
                                    blob_url,
                                    len(display_value),
                                    "" if len(value) == 1 else "s",
                                ))
                    display_row.append(display_value)
                display_rows.append(display_row)

            # Show 'Edit SQL' button only if:
            # - User is allowed to execute SQL
            # - SQL is an approved SELECT statement
            # - No magic parameters, so no :_ in the SQL string
            edit_sql_url = None
            is_validated_sql = False
            try:
                validate_sql_select(sql)
                is_validated_sql = True
            except InvalidSql:
                pass
            if allow_execute_sql and is_validated_sql and ":_" not in sql:
                edit_sql_url = (
                    self.ds.urls.database(database)
                    + "?"
                    + urlencode({"sql": sql, **named_parameter_values})
                )
            return {
                "display_rows": display_rows,
                "custom_sql": True,
                "named_parameter_values": named_parameter_values,
                "editable": editable,
                "canned_query": canned_query,
                "edit_sql_url": edit_sql_url,
                "metadata": metadata,
                "config": self.ds.config_dict(),
                "request": request,
                "path_with_added_args": path_with_added_args,
                "path_with_removed_args": path_with_removed_args,
                "hide_sql": "_hide_sql" in params,
            }

        return (
            {
                "database": database,
                "query_name": canned_query,
                "rows": results.rows,
                "truncated": results.truncated,
                "columns": columns,
                "query": {
                    "sql": sql,
                    "params": params
                },
                "private": private,
                "allow_execute_sql": allow_execute_sql,
            },
            extra_template,
            templates,
        )
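The re_named_parameter attribute used above to pull :named parameters out of the SQL is not shown in this snippet; in Datasette it is a simple regex along these lines (a sketch, not necessarily the exact definition):

import re

# Matches :name style parameters in a SQL string, e.g.
# "select * from t where id = :id" yields ["id"]
re_named_parameter = re.compile(r":([a-zA-Z0-9_]+)")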
Example #5
async def post(request):
    if request.method == "GET":
        return Response.html(request.scope["csrftoken"]())
    else:
        return Response.json(await request.post_vars())
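The request.scope["csrftoken"]() callable is injected by the asgi-csrf middleware that Datasette wraps around the application; a client first GETs a token and then sends it back with the POST. A hedged httpx flow, assuming the view is mounted at a hypothetical /-/post route:

import httpx

client = httpx.Client(base_url="http://localhost:8001")
token = client.get("/-/post").text  # the GET branch returns the token
response = client.post("/-/post", data={"csrftoken": token, "name": "value"})
print(response.json())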
Example #6
async def schema_versions_json(datasette):
    return Response.json(await _schema_versions(datasette))
Example #7
def test_response_json():
    response = Response.json({"this_is": "json"})
    assert 200 == response.status
    assert '{"this_is": "json"}' == response.body
    assert "application/json; charset=utf-8" == response.content_type
Example #8
def asgi_scope(scope):
    return Response.json(scope, default=repr)
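The default=repr argument is forwarded to json.dumps, so values in the ASGI scope that are not JSON-serializable (such as the send and receive callables) are rendered through repr() instead of raising TypeError. The same idea with plain json:

import json

scope_like = {"type": "http", "extensions": {"obj": object()}}
# Without default=repr this would raise TypeError
print(json.dumps(scope_like, default=repr))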
Example #9
async def view_graphql(request, datasette):
    if request.method == "OPTIONS":
        return Response.text("ok",
                             headers=CORS_HEADERS if datasette.cors else {})

    body = await post_body(request)
    database = request.url_vars.get("database")

    try:
        datasette.get_database(database)
    except KeyError:
        raise NotFound("Database does not exist")

    if not body and "text/html" in request.headers.get("accept", ""):
        return Response.html(
            await datasette.render_template(
                "graphiql.html", {"database": database}, request=request
            ),
            headers=CORS_HEADERS if datasette.cors else {},
        )

    schema = await schema_for_database_via_cache(datasette, database=database)

    if request.args.get("schema"):
        return Response.text(print_schema(schema))

    incoming = {}
    if body:
        incoming = json.loads(body)
        query = incoming.get("query")
        variables = incoming.get("variables")
        operation_name = incoming.get("operationName")
    else:
        query = request.args.get("query")
        variables = request.args.get("variables", "")
        if variables:
            variables = json.loads(variables)
        operation_name = request.args.get("operationName")

    if not query:
        return Response.json(
            {"error": "Missing query"},
            status=400,
            headers=CORS_HEADERS if datasette.cors else {},
        )

    config = datasette.plugin_config("datasette-graphql") or {}
    context = {
        "time_started": time.monotonic(),
        "time_limit_ms": config.get("time_limit_ms") or DEFAULT_TIME_LIMIT_MS,
        "num_queries_executed": 0,
        "num_queries_limit": (
            config.get("num_queries_limit") or DEFAULT_NUM_QUERIES_LIMIT
        ),
    }

    result = await graphql(
        schema,
        query,
        operation_name=operation_name,
        variable_values=variables,
        context_value=context,
        executor=AsyncioExecutor(),
        return_promise=True,
    )
    response = {"data": result.data}
    if result.errors:
        response["errors"] = [format_error(error) for error in result.errors]

    return Response.json(
        response,
        status=200 if not result.errors else 500,
        headers=CORS_HEADERS if datasette.cors else {},
    )
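Calling this endpoint from a client is a plain JSON POST. A minimal sketch with httpx, assuming the plugin's /graphql route and a hypothetical users table:

import httpx

response = httpx.post(
    "http://localhost:8001/graphql",
    json={
        "query": "{ users { nodes { id name } } }",  # hypothetical schema
        "variables": {},
    },
)
print(response.json()["data"])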
Example #10
async def csv_importer(scope, receive, datasette, request):
    """
    CSV Importer initiates a CSV import using the CLI tool CSVs-to-SQlite.
    Accepts HTTP POST with form data as follows:

    `csv` should contain the CSV file to be imported

    `database` is the name of the database file to be written to. If blank,
    we will choose a name base on the uploaded file name.

    If `xhr` is set to `1` we will assume a JS client is running and this
    endpoint will return JSON (as opposed to rendering a different HTML
    template without `xhr` set to `1`).

    A valid `csrftoken` needs to be provided.

    Any form input starting with "-" are interpreted as arguments to
    the CLI tool. Such arguments are considered single-toggle arguments
    that don't use any parameters, so "--on true" will be interpreted
    as running the tool with "--on".
    """
    if not await datasette.permission_allowed(
        request.actor, "csv-importer", default=False
    ):
        raise Forbidden("Permission denied for csv-importer")

    plugin_config = datasette.plugin_config(
        "datasette-csv-importer"
    ) or {}
    print("plugin_config", plugin_config)

    db = get_status_database(datasette, plugin_config)
    status_table = get_status_table(plugin_config)

    # We need the ds_request to pass to render_template for CSRF tokens
    ds_request = request

    # We use the Starlette request object to handle file uploads
    starlette_request = Request(scope, receive)
    # If we aren't uploading a new file (POST), show uploader screen
    if starlette_request.method != "POST":
        print("plugin_config", plugin_config)
        return Response.html(
            await datasette.render_template(
                "csv_importer.html", {}, request=ds_request
            )
        )

    formdata = await starlette_request.form()
    csv = formdata["csv"]

    # csv.file is a SpooledTemporaryFile. csv.filename is the filename
    filename = csv.filename
    basename = os.path.splitext(filename)[0]
    if "database" in formdata and formdata["database"]:
        basename = formdata["database"]

    outfile_db = os.path.join(get_dbpath(plugin_config), f"{basename}.db")

    if basename in datasette.databases:
        global_access = await datasette.permission_allowed(
            request.actor, "view-database", default=False
        )
        specific_access = await datasette.permission_allowed(
            request.actor, "view-database", (basename,), default=False
        )
        if not specific_access and not global_access:
            raise Forbidden("view-database access required for existing database!")

    task_id = str(uuid.uuid4())

    def insert_initial_record(conn):
        database = sqlite_utils.Database(conn)
        database[status_table].insert(
            {
                "id": task_id,
                "filename": filename,
                "dbname": basename,
                "started": str(datetime.datetime.utcnow()),
                "completed": None,
                "exitcode": -1,
                "status": "in-progress",
                "message": "Setting up import...",
                "output": None,
            },
            pk="id",
            alter=True,
        )
    await db.execute_write_fn(insert_initial_record, block=True)

    csv_fields = [
        "--primary-key", "--fts", "--index", "--date", "--datetime",
        "--datetime-format"
    ]
    args = []
    for key, value in formdata.items():
        if not key.startswith("-"):
            continue
        # this is a toggle/flag arg with no param
        if value is True or value == "true":
            args.append(key)
            continue
        if not value or value == "false":
            continue
        # we have a columns list field, split it up w/ dupe keys
        # TODO: This screws up when column names have commas in them!
        if "," in value and key in csv_fields:
            for v in re.split(r"/,\s*/", value):
                if not v or not v.strip():
                    continue
                args.append(key)
                args.append(value)
            continue
        args.append(key)
        args.append(value)

    def set_status(conn, message):
        print("Setting status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "message": message,
            },
        )
        print("Successfully set status!")

    # run the command, capture its output
    def run_cli_import(conn):
        set_status(conn, "Running CSV import...")

        exitcode = -1
        output = None
        message = None
        try:
            with tempfile.NamedTemporaryFile() as temp:
                temp.write(csv.file.read())
                temp.flush()

                args.append(temp.name)
                args.append(outfile_db)

                # run the import command, capturing stdout
                with Capturing() as output:
                    exitcode = command.main(
                        args=args, prog_name="cli", standalone_mode=False
                    )
                    if exitcode is not None:
                        exitcode = int(exitcode)
                    # detect a failure to write DB where tool returns success
                    # code this makes it so we don't have to read the
                    # CLI output to figure out if the command succeeded or not
                    if not os.path.exists(outfile_db) and not exitcode:
                        exitcode = -2
        except Exception as e:
            print("Exception", e)
            exitcode = -2
            message = str(e)

        set_status(conn, "Adding database to internal DB list...")
        # Adds this DB to the internal DBs list
        if basename not in datasette.databases:
            print("Adding database", basename)
            datasette.add_database(
                Database(datasette, path=outfile_db, is_mutable=True),
                name=basename,
            )
            # print("Database added successfully!")
            # try:
            #     loop = asyncio.get_running_loop()
            # except RuntimeError:
            #     loop = asyncio.new_event_loop()
            # print("Running schema refresh...")
            # loop.run_until_complete(datasette.refresh_schemas())
            # print("Schema refresh complete!")

        csvspath = get_csvspath(plugin_config)
        if csvspath:
            set_status(conn, "Saving CSV to server directory...")
            csv_db_name = args[-1].replace(".db", "")
            csv_table_name = csv_db_name
            if "-t" in formdata:
                csv_table_name = formdata["-t"]
            if "--table" in formdata:
                csv_table_name = formdata["--table"]
            outfile_csv = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.csv"
            )
            outfile_args = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.json"
            )

            # success! save our configs and CSV
            print("Writing CSV", outfile_csv)
            with open(outfile_csv, "wb") as f:
                csv.file.seek(0)
                f.write(csv.file.read())

            print("Writing args to", outfile_args)
            with open(outfile_args, "w") as f:
                f.write(json.dumps(args, indent=2))

        if get_use_live_metadata(plugin_config):
            print("Running live-config integration...")
            set_status(
                conn, "Running live-config plugin integration..."
            )
            # add the permission table, grant access to current user only
            # this will create the DB if not exists
            print("Opening DB:", outfile_db)
            out_db = sqlite_utils.Database(sqlite3.connect(outfile_db))
            try:
                out_db["__metadata"].get("allow")
            except sqlite_utils.db.NotFoundError:
                # don't overwrite, only create
                out_db["__metadata"].insert({
                    "key": "tables",
                    "value": json.dumps({
                        "__metadata": {
                            "hidden": True
                        }
                    }),
                }, pk="key", alter=True, replace=False, ignore=True)

        if get_use_live_permissions(plugin_config):
            print("Setting live-permissions plugin status...")
            set_status(
                conn,
                "Running live-permissions plugin integration..."
            )
            print("Running set_perms_for_live_permissions with basename:", basename)
            set_perms_for_live_permissions(datasette, request.actor, basename)
            print("set_perms_for_live_permissions complete!")

        if not message:
            message = "Import successful!" if not exitcode else "Failure"

        print("Updating status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "completed": str(datetime.datetime.utcnow()),
                "exitcode": exitcode,
                "status": "completed",
                "message": message,
                "output": "\n".join(output),
            },
        )

    await db.execute_write_fn(run_cli_import)

    if formdata.get("xhr"):
        return Response.json(
            {
                "url": datasette.urls.path("/{filename}".format(
                    filename=quote_plus(filename),
                )),
                "status_database_path": quote_plus(db.name),
                "status_table": quote_plus(status_table),
                "task_id": task_id,
            }
        )

    return Response.html(
        await datasette.render_template(
            "csv_importer_done.html", {
                "filename": filename,
                "task_id": task_id,
            },
        )
    )
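The Capturing context manager used inside run_cli_import is not defined in this snippet; a common recipe (an assumption, not necessarily the plugin's exact helper) collects everything printed to stdout as a list of lines:

import sys
from io import StringIO


class Capturing(list):
    # Capture stdout produced inside a with-block as a list of lines
    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = StringIO()
        return self

    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        del self._stringio
        sys.stdout = self._stdout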
Example #11
async def render_response(request):
    if request.args.get("_broken"):
        return "this should break"
    return Response.json({"this_is": "json"})