Example #1
 async def get(self, request):
     token = request.args.get("token") or ""
     if not self.ds._root_token:
         raise Forbidden("Root token has already been used")
     if secrets.compare_digest(token, self.ds._root_token):
         self.ds._root_token = None
         response = Response.redirect("/")
         response.set_cookie(
             "ds_actor", self.ds.sign({"a": {"id": "root"}}, "actor")
         )
         return response
     else:
         raise Forbidden("Invalid token")
Example #2
async def check_permission_instance(request, datasette):
    if (await datasette.permission_allowed(
            request.actor,
            "view-instance",
            default=None,
    )) is False:
        raise Forbidden("view-instance denied")
Example #3
 async def view_get(self, request, database, hash, correct_hash_present,
                    **kwargs):
     await self.check_permissions(
         request,
         [
             ("view-database-download", database),
             ("view-database", database),
             "view-instance",
         ],
     )
     if database not in self.ds.databases:
         raise DatasetteError("Invalid database", status=404)
     db = self.ds.databases[database]
     if db.is_memory:
         raise DatasetteError("Cannot download in-memory databases",
                              status=404)
     if not self.ds.setting("allow_download") or db.is_mutable:
         raise Forbidden("Database download is forbidden")
     if not db.path:
         raise DatasetteError("Cannot download database", status=404)
     filepath = db.path
     headers = {}
     if self.ds.cors:
         headers["Access-Control-Allow-Origin"] = "*"
     headers["Transfer-Encoding"] = "chunked"
     return AsgiFileDownload(
         filepath,
         filename=os.path.basename(filepath),
         content_type="application/octet-stream",
         headers=headers,
     )
Example #4
async def check_permissions(request, permissions, ds):
    "permissions is a list of (action, resource) tuples or 'action' strings"
    # from https://github.com/simonw/datasette/blob/main/datasette/views/base.py#L69
    for permission in permissions:
        if isinstance(permission, str):
            action = permission
            resource = None
        elif isinstance(permission, (tuple, list)) and len(permission) == 2:
            action, resource = permission
        else:
            assert (
                False
            ), "permission should be string or tuple of two items: {}".format(
                repr(permission)
            )
        ok = await ds.permission_allowed(
            request.actor,
            action,
            resource=resource,
            default=None,
        )
        if ok is not None:
            if ok:
                return
            else:
                raise Forbidden(action)
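A minimal illustrative call, combining both accepted forms from the docstring above (the "fixtures" database name is only a placeholder), might look like this; the checks run in order and the first non-None decision wins:

await check_permissions(
    request,
    [
        ("view-database", "fixtures"),  # (action, resource) tuple
        "view-instance",                # bare action string
    ],
    ds,
)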
Example #5
 async def check_permissions(self, request, permissions):
     "permissions is a list of (action, resource) tuples or 'action' strings"
     for permission in permissions:
         if isinstance(permission, str):
             action = permission
             resource = None
         elif isinstance(permission,
                         (tuple, list)) and len(permission) == 2:
             action, resource = permission
         else:
             assert (
                 False
             ), "permission should be string or tuple of two items: {}".format(
                 repr(permission))
         ok = await self.ds.permission_allowed(
             request.actor,
             action,
             resource=resource,
             default=None,
         )
         if ok is not None:
             if ok:
                 return
             else:
                 raise Forbidden(action)
Example #6
async def check_permission_execute_sql(request, datasette, database):
    if (await datasette.permission_allowed(
            request.actor,
            "execute-sql",
            resource=database,
            default=None,
    )) is False:
        raise Forbidden("execute-sql denied")
Example #7
 async def get(self, request):
     await self.check_permission(request, "view-instance")
     if not await self.ds.permission_allowed(request.actor, "permissions-debug"):
         raise Forbidden("Permission denied")
     return await self.render(
         ["permissions_debug.html"],
         request,
         {"permission_checks": reversed(self.ds._permission_checks)},
     )
Example #8
 async def check_permission(self, request, action, resource=None):
     ok = await self.ds.permission_allowed(
         request.actor,
         action,
         resource=resource,
         default=True,
     )
     if not ok:
         raise Forbidden(action)
Example #9
 async def get(self, request):
     await self.check_permission(request, "view-instance")
     if not await self.ds.permission_allowed(request.actor, "permissions-debug"):
         raise Forbidden("Permission denied")
     return await self.render(
         ["permissions_debug.html"],
         request,
         # list() avoids error if check is performed during template render:
         {"permission_checks": list(reversed(self.ds._permission_checks))},
     )
Example #10
async def manage_db_group(scope, receive, datasette, request):
    db_name = unquote_plus(request.url_vars["database"])
    if not await datasette.permission_allowed(
        request.actor, "live-permissions-edit", db_name, default=False
    ):
        raise Forbidden("Permission denied")

    db = get_db(datasette)

    group_id = None
    results = db["groups"].rows_where("name=?", [f"DB Access: {db_name}"])
    for row in results:
        group_id = row["id"]
        break

    assert db_name in datasette.databases, "Non-existent database!"

    if not group_id and db_name not in BLOCKED_DB_ACTIONS:
        db["groups"].insert({
            "name": f"DB Access: {db_name}",
        }, pk="id", replace=True)
        return await manage_db_group(scope, receive, datasette, request)

    if request.method in ["POST", "DELETE"]:
        formdata = await request.post_vars()
        user_id = formdata["user_id"]

        if request.method == "POST":
            db["group_membership"].insert({
                "group_id": group_id,
                "user_id": user_id,
            }, replace=True)
        elif request.method == "DELETE":
            db["group_membership"].delete((group_id, user_id))
            return Response.text('', status=204)
        else:
            raise NotImplementedError(f"Bad method: {request.method}")

    perms_query = """
        select distinct user_id as id, lookup, value, description
        from group_membership join users
        on group_membership.user_id = users.id
        where group_membership.group_id=?
    """
    users = db.execute(perms_query, (group_id,))
    return Response.html(
        await datasette.render_template(
            "database_management.html", {
                "database": db_name,
                "users": users,
            }, request=request
        )
    )
Example #11
async def live_config(scope, receive, datasette, request):
    submit_url = request.path
    database_name = unquote_plus(
        request.url_vars.get("database_name", "global"))
    meta_in_db = True if request.args.get("meta_in_db") else False
    if meta_in_db:
        submit_url += '?meta_in_db=true'
    table_name = "global"
    perm_args = ()
    if database_name:
        perm_args = (database_name, )
    if not await datasette.permission_allowed(
            request.actor, "live-config", *perm_args, default=False):
        raise Forbidden("Permission denied for live-config")

    if request.method != "POST":
        # TODO: Decide if we use this or pull saved config
        metadata = datasette.metadata()
        if database_name and database_name != "global":
            metadata = metadata["databases"].get(database_name, {})
        return Response.html(await datasette.render_template(
            "config_editor.html", {
                "database_name": database_name,
                "configJSON": json.dumps(metadata),
                "submit_url": submit_url,
            },
            request=request))

    formdata = await request.post_vars()
    if meta_in_db and database_name in datasette.databases:
        db_meta = json.loads(formdata["config"])
        update_db_metadata(datasette.databases[database_name], db_meta)
    else:
        update_live_config_db(datasette, database_name, table_name,
                              formdata["config"])

    metadata = datasette.metadata()
    if database_name != "global":
        metadata = metadata["databases"][database_name]
    return Response.html(await datasette.render_template(
        "config_editor.html", {
            "database_name": database_name,
            "message": "Configuration updated successfully!",
            "status": "success",
            "configJSON": json.dumps(metadata),
            "submit_url": submit_url,
        },
        request=request))
Example #12
async def csv_importer_status(scope, receive, datasette, request):
    if not await datasette.permission_allowed(
        request.actor, "csv-importer", default=False
    ):
        raise Forbidden("Permission denied for csv-importer")

    plugin_config = datasette.plugin_config(
        "datasette-csv-importer"
    ) or {}

    db = get_status_database(datasette, plugin_config)
    status_table = get_status_table(plugin_config)

    query = f"select * from {status_table} where id = ? limit 1"
    result = await db.execute(query, (request.url_vars["task_id"],))
    return Response.json(dict(result.first()))
Example #13
 async def view_get(self, request, database, hash, correct_hash_present, **kwargs):
     await self.check_permission(request, "view-instance")
     await self.check_permission(request, "view-database", database)
     await self.check_permission(request, "view-database-download", database)
     if database not in self.ds.databases:
         raise DatasetteError("Invalid database", status=404)
     db = self.ds.databases[database]
     if db.is_memory:
         raise DatasetteError("Cannot download :memory: database", status=404)
     if not self.ds.config("allow_download") or db.is_mutable:
         raise Forbidden("Database download is forbidden")
     if not db.path:
         raise DatasetteError("Cannot download database", status=404)
     filepath = db.path
     return AsgiFileDownload(
         filepath,
         filename=os.path.basename(filepath),
         content_type="application/octet-stream",
     )
Example #14
async def perms_crud(scope, receive, datasette, request):
    table = request.url_vars["table"]
    default_next = datasette.urls.path(f"/live_permissions/{table}")
    next = request.args.get("next", default_next)
    obj_id = request.url_vars["id"]

    if not await datasette.permission_allowed(
        request.actor, "live-permissions-edit", default=False
    ):
        raise Forbidden("Permission denied")

    assert table and obj_id, "Invalid URL"
    assert request.method in ["POST", "DELETE"], "Bad method"
    assert table in KNOWN_TABLES, "Bad table name provided"

    db = get_db(datasette)
    # POST is just dual update/create (depending on if id=="new")
    if request.method == "POST":
        formdata = await request.post_vars()

        if "csrftoken" in formdata:
            del formdata["csrftoken"]

        pk = "id"
        if table == "group_membership":
            pk = ("group_id", "user_id")
        db[table].insert(
            formdata, pk=pk, alter=False, replace=False
        )
        return Response.redirect(next)

    elif request.method == "DELETE":
        try:
            obj_id = int(obj_id)
        except ValueError:
            obj_id = tuple(int(i) for i in obj_id.split(","))
        db[table].delete(obj_id)
        return Response.text('', status=204)

    else:
        raise NotImplementedError("Bad HTTP method!")
Example #15
 async def get(self, request):
     database = tilde_decode(request.url_vars["database"])
     await self.ds.ensure_permissions(
         request.actor,
         [
             ("view-database-download", database),
             ("view-database", database),
             "view-instance",
         ],
     )
     try:
         db = self.ds.get_database(route=database)
     except KeyError:
         raise DatasetteError("Invalid database", status=404)
     if db.is_memory:
         raise DatasetteError("Cannot download in-memory databases",
                              status=404)
     if not self.ds.setting("allow_download") or db.is_mutable:
         raise Forbidden("Database download is forbidden")
     if not db.path:
         raise DatasetteError("Cannot download database", status=404)
     filepath = db.path
     headers = {}
     if self.ds.cors:
         add_cors_headers(headers)
     if db.hash:
         etag = '"{}"'.format(db.hash)
         headers["Etag"] = etag
         # Has user seen this already?
         if_none_match = request.headers.get("if-none-match")
         if if_none_match and if_none_match == etag:
             return Response("", status=304)
     headers["Transfer-Encoding"] = "chunked"
     return AsgiFileDownload(
         filepath,
         filename=os.path.basename(filepath),
         content_type="application/octet-stream",
         headers=headers,
     )
Example #16
    async def data(
        self,
        request,
        sql,
        editable=True,
        canned_query=None,
        metadata=None,
        _size=None,
        named_parameters=None,
        write=False,
    ):
        database_route = tilde_decode(request.url_vars["database"])
        try:
            db = self.ds.get_database(route=database_route)
        except KeyError:
            raise NotFound("Database not found: {}".format(database_route))
        database = db.name
        params = {key: request.args.get(key) for key in request.args}
        if "sql" in params:
            params.pop("sql")
        if "_shape" in params:
            params.pop("_shape")

        private = False
        if canned_query:
            # Respect canned query permissions
            await self.ds.ensure_permissions(
                request.actor,
                [
                    ("view-query", (database, canned_query)),
                    ("view-database", database),
                    "view-instance",
                ],
            )
            private = not await self.ds.permission_allowed(
                None, "view-query", (database, canned_query), default=True)
        else:
            await self.ds.ensure_permissions(request.actor,
                                             [("execute-sql", database)])

        # Extract any :named parameters
        named_parameters = named_parameters or await derive_named_parameters(
            self.ds.get_database(database), sql)
        named_parameter_values = {
            named_parameter: params.get(named_parameter) or ""
            for named_parameter in named_parameters
            if not named_parameter.startswith("_")
        }

        # Set to blank string if missing from params
        for named_parameter in named_parameters:
            if named_parameter not in params and not named_parameter.startswith(
                    "_"):
                params[named_parameter] = ""

        extra_args = {}
        if params.get("_timelimit"):
            extra_args["custom_time_limit"] = int(params["_timelimit"])
        if _size:
            extra_args["page_size"] = _size

        templates = [f"query-{to_css_class(database)}.html", "query.html"]
        if canned_query:
            templates.insert(
                0,
                f"query-{to_css_class(database)}-{to_css_class(canned_query)}.html",
            )

        query_error = None

        # Execute query - as write or as read
        if write:
            if request.method == "POST":
                # If database is immutable, return an error
                if not db.is_mutable:
                    raise Forbidden("Database is immutable")
                body = await request.post_body()
                body = body.decode("utf-8").strip()
                if body.startswith("{") and body.endswith("}"):
                    params = json.loads(body)
                    # But we want key=value strings
                    for key, value in params.items():
                        params[key] = str(value)
                else:
                    params = dict(parse_qsl(body, keep_blank_values=True))
                # Should we return JSON?
                should_return_json = (request.headers.get("accept")
                                      == "application/json"
                                      or request.args.get("_json")
                                      or params.get("_json"))
                if canned_query:
                    params_for_query = MagicParameters(params, request,
                                                       self.ds)
                else:
                    params_for_query = params
                ok = None
                try:
                    cursor = await self.ds.databases[database].execute_write(
                        sql, params_for_query)
                    message = metadata.get(
                        "on_success_message"
                    ) or "Query executed, {} row{} affected".format(
                        cursor.rowcount, "" if cursor.rowcount == 1 else "s")
                    message_type = self.ds.INFO
                    redirect_url = metadata.get("on_success_redirect")
                    ok = True
                except Exception as e:
                    message = metadata.get("on_error_message") or str(e)
                    message_type = self.ds.ERROR
                    redirect_url = metadata.get("on_error_redirect")
                    ok = False
                if should_return_json:
                    return Response.json({
                        "ok": ok,
                        "message": message,
                        "redirect": redirect_url,
                    })
                else:
                    self.ds.add_message(request, message, message_type)
                    return self.redirect(request, redirect_url or request.path)
            else:

                async def extra_template():
                    return {
                        "request": request,
                        "db_is_immutable": not db.is_mutable,
                        "path_with_added_args": path_with_added_args,
                        "path_with_removed_args": path_with_removed_args,
                        "named_parameter_values": named_parameter_values,
                        "canned_query": canned_query,
                        "success_message": request.args.get("_success") or "",
                        "canned_write": True,
                    }

                return (
                    {
                        "database": database,
                        "rows": [],
                        "truncated": False,
                        "columns": [],
                        "query": {
                            "sql": sql,
                            "params": params
                        },
                        "private": private,
                    },
                    extra_template,
                    templates,
                )
        else:  # Not a write
            if canned_query:
                params_for_query = MagicParameters(params, request, self.ds)
            else:
                params_for_query = params
            try:
                results = await self.ds.execute(database,
                                                sql,
                                                params_for_query,
                                                truncate=True,
                                                **extra_args)
                columns = [r[0] for r in results.description]
            except sqlite3.DatabaseError as e:
                query_error = e
                results = None
                columns = []

        allow_execute_sql = await self.ds.permission_allowed(request.actor,
                                                             "execute-sql",
                                                             database,
                                                             default=True)

        async def extra_template():
            display_rows = []
            for row in results.rows if results else []:
                display_row = []
                for column, value in zip(results.columns, row):
                    display_value = value
                    # Let the plugins have a go
                    # pylint: disable=no-member
                    plugin_display_value = None
                    for candidate in pm.hook.render_cell(
                            row=row,
                            value=value,
                            column=column,
                            table=None,
                            database=database,
                            datasette=self.ds,
                    ):
                        candidate = await await_me_maybe(candidate)
                        if candidate is not None:
                            plugin_display_value = candidate
                            break
                    if plugin_display_value is not None:
                        display_value = plugin_display_value
                    else:
                        if value in ("", None):
                            display_value = Markup("&nbsp;")
                        elif is_url(str(display_value).strip()):
                            display_value = Markup(
                                '<a href="{url}">{url}</a>'.format(
                                    url=escape(value.strip())))
                        elif isinstance(display_value, bytes):
                            blob_url = path_with_format(
                                request=request,
                                format="blob",
                                extra_qs={
                                    "_blob_column":
                                    column,
                                    "_blob_hash":
                                    hashlib.sha256(display_value).hexdigest(),
                                },
                            )
                            formatted = format_bytes(len(value))
                            display_value = markupsafe.Markup(
                                '<a class="blob-download" href="{}"{}>&lt;Binary:&nbsp;{:,}&nbsp;byte{}&gt;</a>'
                                .format(
                                    blob_url,
                                    ' title="{}"'.format(formatted)
                                    if "bytes" not in formatted else "",
                                    len(value),
                                    "" if len(value) == 1 else "s",
                                ))

                    display_row.append(display_value)
                display_rows.append(display_row)

            # Show 'Edit SQL' button only if:
            # - User is allowed to execute SQL
            # - SQL is an approved SELECT statement
            # - No magic parameters, so no :_ in the SQL string
            edit_sql_url = None
            is_validated_sql = False
            try:
                validate_sql_select(sql)
                is_validated_sql = True
            except InvalidSql:
                pass
            if allow_execute_sql and is_validated_sql and ":_" not in sql:
                edit_sql_url = (self.ds.urls.database(database) + "?" +
                                urlencode({
                                    **{
                                        "sql": sql,
                                    },
                                    **named_parameter_values,
                                }))

            show_hide_hidden = ""
            if metadata.get("hide_sql"):
                if bool(params.get("_show_sql")):
                    show_hide_link = path_with_removed_args(
                        request, {"_show_sql"})
                    show_hide_text = "hide"
                    show_hide_hidden = (
                        '<input type="hidden" name="_show_sql" value="1">')
                else:
                    show_hide_link = path_with_added_args(
                        request, {"_show_sql": 1})
                    show_hide_text = "show"
            else:
                if bool(params.get("_hide_sql")):
                    show_hide_link = path_with_removed_args(
                        request, {"_hide_sql"})
                    show_hide_text = "show"
                    show_hide_hidden = (
                        '<input type="hidden" name="_hide_sql" value="1">')
                else:
                    show_hide_link = path_with_added_args(
                        request, {"_hide_sql": 1})
                    show_hide_text = "hide"
            hide_sql = show_hide_text == "show"
            return {
                "display_rows": display_rows,
                "custom_sql": True,
                "named_parameter_values": named_parameter_values,
                "editable": editable,
                "canned_query": canned_query,
                "edit_sql_url": edit_sql_url,
                "metadata": metadata,
                "settings": self.ds.settings_dict(),
                "request": request,
                "show_hide_link": self.ds.urls.path(show_hide_link),
                "show_hide_text": show_hide_text,
                "show_hide_hidden": markupsafe.Markup(show_hide_hidden),
                "hide_sql": hide_sql,
            }

        return (
            {
                "ok": not query_error,
                "database": database,
                "query_name": canned_query,
                "rows": results.rows if results else [],
                "truncated": results.truncated if results else False,
                "columns": columns,
                "query": {
                    "sql": sql,
                    "params": params
                },
                "error": str(query_error) if query_error else None,
                "private": private,
                "allow_execute_sql": allow_execute_sql,
            },
            extra_template,
            templates,
            400 if query_error else 200,
        )
Example #17
async def import_table(request, datasette):
    if not await datasette.permission_allowed(
            request.actor, "import-table", default=False):
        raise Forbidden("Permission denied for import-table")

    mutable_databases = [
        db for db in datasette.databases.values()
        if db.is_mutable and db.name != "_internal"
    ]
    error = None

    if request.method == "POST":
        post_vars = await request.post_vars()
        url = post_vars.get("url")
        try:
            table_name, rows, pks, total, next_page = await load_first_page(url)
        except Exception as e:
            error = str(e)
        else:
            primary_key = (pks[0] if len(pks) == 1 else pks) or "rowid"

            def start_table(conn):
                db = sqlite_utils.Database(conn)
                with db.conn:
                    db[table_name].insert_all(rows, pk=primary_key)

            database = datasette.get_database(post_vars.get("database"))
            await database.execute_write_fn(start_table, block=True)

            # This is a bit of a mess. My first implementation of this worked
            # by starting a function on the write thread which fetched each
            # page in turn and wrote them to the database synchronously.
            #
            # Problem: the write thread can only run one function at a time -
            # and for a large number of rows this function blocked anyone
            # else from scheduling a write until it had finished.
            #
            # This more complex version instead runs the paginated HTTP gets
            # in an asyncio task, and has that task schedule a write operation
            # for each individual batch of rows that it receives.

            def do_the_rest(url):
                async def inner_async():
                    nonlocal url

                    def row_writer(rows):
                        def inner(conn):
                            db = sqlite_utils.Database(conn)
                            with db.conn:
                                db[table_name].insert_all(rows)

                        return inner

                    while url:
                        async with httpx.AsyncClient() as client:
                            response = await client.get(url)
                            data = response.json()
                            if data.get("rows"):
                                await database.execute_write_fn(
                                    row_writer(data["rows"]))
                            url = data.get("next_url")

                return inner_async()

            if next_page:
                asyncio.ensure_future(do_the_rest(next_page))

            return Response.redirect("/{}/{}?_import_expected_rows={}".format(
                database.name, quote_plus(table_name), total))

    return Response.html(await datasette.render_template(
        "datasette_import_table.html",
        {
            "databases": [m.name for m in mutable_databases],
            "error": error,
            "database": request.args.get("database"),
        },
        request=request,
    ))
Example #18
async def csv_importer(scope, receive, datasette, request):
    """
    CSV Importer initiates a CSV import using the CLI tool csvs-to-sqlite.
    Accepts HTTP POST with form data as follows:

    `csv` should contain the CSV file to be imported

    `database` is the name of the database file to be written to. If blank,
    we will choose a name based on the uploaded file name.

    If `xhr` is set to `1` we will assume a JS client is running and this
    endpoint will return JSON (as opposed to rendering an HTML template
    when `xhr` is not set to `1`).

    A valid `csrftoken` needs to be provided.

    Any form input starting with "-" is interpreted as an argument to
    the CLI tool. Such arguments are considered single-toggle arguments
    that don't use any parameters, so "--on true" will be interpreted
    as running the tool with "--on".
    """
    if not await datasette.permission_allowed(
        request.actor, "csv-importer", default=False
    ):
        raise Forbidden("Permission denied for csv-importer")

    plugin_config = datasette.plugin_config(
        "datasette-csv-importer"
    ) or {}
    print("plugin_config", plugin_config)

    db = get_status_database(datasette, plugin_config)
    status_table = get_status_table(plugin_config)

    # We need the ds_request to pass to render_template for CSRF tokens
    ds_request = request

    # We use the Starlette request object to handle file uploads
    starlette_request = Request(scope, receive)
    # If we aren't uploading a new file (POST), show uploader screen
    if starlette_request.method != "POST":
        print("plugin_config", plugin_config)
        return Response.html(
            await datasette.render_template(
                "csv_importer.html", {}, request=ds_request
            )
        )

    formdata = await starlette_request.form()
    csv = formdata["csv"]

    # csv.file is a SpooledTemporaryFile. csv.filename is the filename
    filename = csv.filename
    basename = os.path.splitext(filename)[0]
    if "database" in formdata and formdata["database"]:
        basename = formdata["database"]

    outfile_db = os.path.join(get_dbpath(plugin_config), f"{basename}.db")

    if basename in datasette.databases:
        global_access = await datasette.permission_allowed(
            request.actor, "view-database", default=False
        )
        specific_access = await datasette.permission_allowed(
            request.actor, "view-database", (basename,), default=False
        )
        if not specific_access and not global_access:
            raise Forbidden("view-database access required for existing database!")

    task_id = str(uuid.uuid4())
    def insert_initial_record(conn):
        database = sqlite_utils.Database(conn)
        database[status_table].insert(
            {
                "id": task_id,
                "filename": filename,
                "dbname": basename,
                "started": str(datetime.datetime.utcnow()),
                "completed": None,
                "exitcode": -1,
                "status": "in-progress",
                "message": "Setting up import...",
                "output": None,
            },
            pk="id",
            alter=True,
        )
    await db.execute_write_fn(insert_initial_record, block=True)

    csv_fields = [
        "--primary-key", "--fts", "--index", "--date", "--datetime",
        "--datetime-format"
    ]
    args = []
    for key, value in formdata.items():
        if not key.startswith("-"):
            continue
        # this is a toggle/flag arg with no param
        if value is True or value == "true":
            args.append(key)
            continue
        if not value or value == "false":
            continue
        # we have a columns list field, split it up w/ dupe keys
        # TODO: This screws up when column names have commas in them!
        if "," in value and key in csv_fields:
            for v in re.split(r",\s*", value):
                if not v or not v.strip():
                    continue
                args.append(key)
                args.append(v)
            continue
        args.append(key)
        args.append(value)

    def set_status(conn, message):
        print("Setting status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "message": message,
            },
        )
        print("Successfully set status!")

    # run the command, capture its output
    def run_cli_import(conn):
        set_status(conn, "Running CSV import...")

        exitcode = -1
        output = None
        message = None
        try:
            with tempfile.NamedTemporaryFile() as temp:
                temp.write(csv.file.read())
                temp.flush()

                args.append(temp.name)
                args.append(outfile_db)

                # run the import command, capturing stdout
                with Capturing() as output:
                    exitcode = command.main(
                        args=args, prog_name="cli", standalone_mode=False
                    )
                    if exitcode is not None:
                        exitcode = int(exitcode)
                    # detect a failure to write the DB where the tool returns a
                    # success code; this way we don't have to parse the CLI
                    # output to figure out whether the command succeeded
                    if not os.path.exists(outfile_db) and not exitcode:
                        exitcode = -2
        except Exception as e:
            print("Exception", e)
            exitcode = -2
            message = str(e)

        set_status(conn, "Adding database to internal DB list...")
        # Adds this DB to the internal DBs list
        if basename not in datasette.databases:
            print("Adding database", basename)
            datasette.add_database(
                Database(datasette, path=outfile_db, is_mutable=True),
                name=basename,
            )
            # print("Database added successfully!")
            # try:
            #     loop = asyncio.get_running_loop()
            # except RuntimeError:
            #     loop = asyncio.new_event_loop()
            # print("Running schema refresh...")
            # loop.run_until_complete(datasette.refresh_schemas())
            # print("Schema refresh complete!")

        csvspath = get_csvspath(plugin_config)
        if csvspath:
            set_status(conn, "Saving CSV to server directory...")
            csv_db_name = args[-1].replace(".db", "")
            csv_table_name = csv_db_name
            if "-t" in formdata:
                csv_table_name = formdata["-t"]
            if "--table" in formdata:
                csv_table_name = formdata["--table"]
            outfile_csv = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.csv"
            )
            outfile_args = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.json"
            )

            # success! save our configs and CSV
            print("Writing CSV", outfile_csv)
            with open(outfile_csv, "wb") as f:
                csv.file.seek(0)
                f.write(csv.file.read())

            print("Writing args to", outfile_args)
            with open(outfile_args, "w") as f:
                f.write(json.dumps(args, indent=2))

        if get_use_live_metadata(plugin_config):
            print("Running live-config integration...")
            set_status(
                conn, "Running live-config plugin integration..."
            )
            # add the permission table, grant access to current user only
            # this will create the DB if not exists
            print("Opening DB:", outfile_db)
            out_db = sqlite_utils.Database(sqlite3.connect(outfile_db))
            try:
                out_db["__metadata"].get("allow")
            except sqlite_utils.db.NotFoundError:
                # don't overwrite, only create
                out_db["__metadata"].insert({
                    "key": "tables",
                    "value": json.dumps({
                        "__metadata": {
                            "hidden": True
                        }
                    }),
                }, pk="key", alter=True, replace=False, ignore=True)

        if get_use_live_permissions(plugin_config):
            print("Setting live-permissions plugin status...")
            set_status(
                conn,
                "Running live-permissions plugin integration..."
            )
            print("Running set_perms_for_live_permissions with basename:", basename)
            set_perms_for_live_permissions(datasette, request.actor, basename)
            print("set_perms_for_live_permissions complete!")

        if not message:
            message = "Import successful!" if not exitcode else "Failure"

        print("Updating status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "completed": str(datetime.datetime.utcnow()),
                "exitcode": exitcode,
                "status": "completed",
                "message": message,
                "output": "\n".join(output),
            },
        )

    await db.execute_write_fn(run_cli_import)

    if formdata.get("xhr"):
        return Response.json(
            {
                "url": datasette.urls.path("/{filename}".format(
                    filename=quote_plus(filename),
                )),
                "status_database_path": quote_plus(db.name),
                "status_table": quote_plus(status_table),
                "task_id": task_id,
            }
        )

    return Response.html(
        await datasette.render_template(
            "csv_importer_done.html", {
                "filename": filename,
                "task_id": task_id,
            },
        )
    )
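The docstring at the top of this example lists the form fields the endpoint expects (`csv`, `database`, `xhr`, `csrftoken`, plus any "-"-prefixed options passed through to csvs-to-sqlite). A minimal client sketch, assuming the endpoint is mounted at /-/csv-importer and that a valid csrftoken plus authenticated cookies were already obtained from the upload page, might look like this:

import httpx

def upload_csv(base_url, csrftoken, cookies):
    # The "/-/csv-importer" path is an assumption, not taken from the example above.
    with open("cities.csv", "rb") as f:
        response = httpx.post(
            base_url + "/-/csv-importer",
            data={
                "database": "cities",        # optional target database name
                "xhr": "1",                  # ask for a JSON response
                "csrftoken": csrftoken,      # required by the endpoint
                "--primary-key": "id",       # forwarded to the CLI tool
            },
            files={"csv": ("cities.csv", f)},
            cookies=cookies,
        )
    # With xhr=1 the JSON body includes a task_id that can be polled via the
    # csv_importer_status endpoint shown in Example #12.
    return response.json()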