예제 #1
0
async def live_config(scope, receive, datasette, request):
    """Render and process the live-config editor.

    GET renders the editor pre-filled with current metadata (global or for
    one database); POST saves the submitted config and re-renders with a
    success message.

    Raises:
        Forbidden: when the actor lacks the "live-config" permission.
    """
    submit_url = request.path
    database_name = unquote_plus(
        request.url_vars.get("database_name", "global"))
    meta_in_db = bool(request.args.get("meta_in_db"))
    if meta_in_db:
        # Preserve the flag across the form round-trip
        submit_url += '?meta_in_db=true'
    table_name = "global"
    perm_args = ()
    if database_name:
        perm_args = (database_name, )
    if not await datasette.permission_allowed(
            request.actor, "live-config", *perm_args, default=False):
        raise Forbidden("Permission denied for live-config")

    if request.method != "POST":
        # TODO: Decide if we use this or pull saved config
        metadata = datasette.metadata()
        if database_name and database_name != "global":
            metadata = metadata["databases"].get(database_name, {})
        return Response.html(await datasette.render_template(
            "config_editor.html", {
                "database_name": database_name,
                "configJSON": json.dumps(metadata),
                "submit_url": submit_url,
            },
            request=request))

    formdata = await request.post_vars()
    if meta_in_db and database_name in datasette.databases:
        db_meta = json.loads(formdata["config"])
        update_db_metadata(datasette.databases[database_name], db_meta)
    else:
        update_live_config_db(datasette, database_name, table_name,
                              formdata["config"])

    metadata = datasette.metadata()
    if database_name != "global":
        # Use .get() so a database with no metadata entry yet doesn't raise
        # KeyError here (matches the GET branch above).
        metadata = metadata.get("databases", {}).get(database_name, {})
    return Response.html(await datasette.render_template(
        "config_editor.html", {
            "database_name": database_name,
            "message": "Configuration updated successfully!",
            "status": "success",
            "configJSON": json.dumps(metadata),
            "submit_url": submit_url,
        },
        request=request))
예제 #2
0
async def dashboard_list(request, datasette):
    """List every dashboard configured for the datasette-dashboards plugin."""
    await check_permission_instance(request, datasette)
    dashboards = datasette.plugin_config("datasette-dashboards") or {}
    rendered = await datasette.render_template(
        "dashboard_list.html",
        {"dashboards": dashboards},
    )
    return Response.html(rendered)
예제 #3
0
async def dashboard_chart(request, datasette):
    """Render one chart from a dashboard, applying the request's filters.

    Raises:
        NotFound: when the dashboard slug or chart slug is unknown.
    """
    await check_permission_instance(request, datasette)

    config = datasette.plugin_config("datasette-dashboards") or {}
    slug = urllib.parse.unquote(request.url_vars["slug"])
    chart_slug = urllib.parse.unquote(request.url_vars["chart_slug"])

    if slug not in config:
        raise NotFound(f"Dashboard not found: {slug}")
    dashboard = config[slug]

    if chart_slug not in dashboard["charts"]:
        raise NotFound(f"Chart does not exist: {chart_slug}")
    chart = dashboard["charts"][chart_slug]

    # Charts backed by a database require execute-sql permission on it.
    db_name = chart.get("db")
    if db_name:
        database = datasette.get_database(db_name)
        await check_permission_execute_sql(request, datasette, database)

    options_keys = get_dashboard_filters_keys(request, dashboard)
    query_string = generate_dashboard_filters_qs(request, options_keys)
    fill_chart_query_options(chart, options_keys)

    context = {
        "slug": slug,
        "query_string": query_string,
        "dashboard": dashboard,
        "chart": chart,
    }
    return Response.html(
        await datasette.render_template("dashboard_chart.html", context)
    )
예제 #4
0
 async def render(self, templates, request, context=None):
     """Render the first matching template with shared view context added."""
     template = self.ds.jinja_env.select_template(templates)
     # Prefix the selected template's name with "*" in the debug list.
     marked_templates = [
         ("*" if name == template.name else "") + name
         for name in templates
     ]
     template_context = dict(context or {})
     template_context.update({
         "database_url": self.database_url,
         "csrftoken": request.scope["csrftoken"],
         "database_color": self.database_color,
         "show_messages": lambda: self.ds._show_messages(request),
         "select_templates": marked_templates,
     })
     body = await self.ds.render_template(
         template, template_context, request=request, view_name=self.name)
     return Response.html(body)
예제 #5
0
    async def render(self, templates, request, context):
        """Render templates with plugin-contributed scripts and variables."""
        template = self.ds.jinja_env.select_template(templates)
        select_templates = [
            ("*" if name == template.name else "") + name
            for name in templates
        ]

        # Collect script blocks contributed via the extra_body_script hook.
        # pylint: disable=no-member
        body_scripts = [
            jinja2.Markup(script)
            for script in pm.hook.extra_body_script(
                template=template.name,
                database=context.get("database"),
                table=context.get("table"),
                view_name=self.name,
                datasette=self.ds,
            )
        ]

        # Collect extra template variables. Each hook may return a dict, a
        # callable returning a dict, or a coroutine resolving to one.
        extra_template_vars = {}
        # pylint: disable=no-member
        for extra_vars in pm.hook.extra_template_vars(
                template=template.name,
                database=context.get("database"),
                table=context.get("table"),
                view_name=self.name,
                request=request,
                datasette=self.ds,
        ):
            if callable(extra_vars):
                extra_vars = extra_vars()
            if asyncio.iscoroutine(extra_vars):
                extra_vars = await extra_vars
            assert isinstance(extra_vars, dict), \
                "extra_vars is of type {}".format(type(extra_vars))
            extra_template_vars.update(extra_vars)

        # Plugin-provided vars take precedence over the built-in defaults.
        full_context = dict(context)
        full_context.update({
            "app_css_hash": self.ds.app_css_hash(),
            "select_templates": select_templates,
            "zip": zip,
            "body_scripts": body_scripts,
            "extra_css_urls": self._asset_urls(
                "extra_css_urls", template, context),
            "extra_js_urls": self._asset_urls(
                "extra_js_urls", template, context),
            "format_bytes": format_bytes,
            "database_url": self.database_url,
            "database_color": self.database_color,
        })
        full_context.update(extra_template_vars)
        return Response.html(await template.render_async(full_context))
예제 #6
0
File: base.py — Project: simonw/datasette
 async def render(self, templates, request, context=None):
     """Render the view, advertising a JSON alternate when available."""
     context = context or {}
     template = self.ds.jinja_env.select_template(templates)
     select_templates = [
         ("*" if name == template.name else "") + name
         for name in templates
     ]
     template_context = {
         **context,
         "database_color": self.database_color,
         "select_templates": select_templates,
     }
     headers = {}
     if self.has_json_alternate:
         # Expose the .json representation via a Link header and context var.
         alternate_url_json = self.ds.absolute_url(
             request,
             self.ds.urls.path(
                 path_with_format(request=request, format="json")),
         )
         template_context["alternate_url_json"] = alternate_url_json
         headers["Link"] = (
             '{}; rel="alternate"; type="application/json+datasette"'
             .format(alternate_url_json)
         )
     body = await self.ds.render_template(
         template,
         template_context,
         request=request,
         view_name=self.name,
     )
     return Response.html(body, headers=headers)
예제 #7
0
async def tiles_stack_explorer(datasette):
    """Render the explorer page for the combined tile stack.

    Scans every database in the stack for minzoom/maxzoom/attribution
    metadata; uses the zoom range found and, when every database agrees on
    one attribution string, displays it.
    """
    attribution = ""
    # Find min/max zoom by looking at the stack
    priority_order = await tiles_stack_database_order(datasette)
    min_zooms = []
    max_zooms = []
    attributions = []
    for db in priority_order:
        metadata = {
            row["name"]: row["value"]
            for row in (
                await db.execute("select name, value from metadata")).rows
        }
        if "minzoom" in metadata:
            min_zooms.append(int(metadata["minzoom"]))
        if "maxzoom" in metadata:
            max_zooms.append(int(metadata["maxzoom"]))
        # Bug fix: attributions were never collected, so the agreement
        # check below could never succeed.
        if "attribution" in metadata:
            attributions.append(metadata["attribution"])
    # If all attributions are the same, use that - otherwise leave blank
    if len(set(attributions)) == 1:
        attribution = attributions[0]
    # Fall back to sane defaults when no database declares zoom levels,
    # instead of min()/max() raising ValueError on an empty list.
    min_zoom = min(min_zooms) if min_zooms else 0
    max_zoom = max(max_zooms) if max_zooms else 19
    return Response.html(await datasette.render_template(
        "tiles_stack_explorer.html",
        {
            "default_latitude": 0,
            "default_longitude": 0,
            "default_zoom": min_zoom,
            "min_zoom": min_zoom,
            "max_zoom": max_zoom,
            "attribution": json.dumps(attribution),
        },
    ))
async def paprika_recipe_link(request, datasette, rows):
    """Render the first row of *rows* as a recipe page."""
    first_row = rows[0]
    context = dict(zip(first_row.keys(), tuple(first_row)))
    return Response.html(await datasette.render_template(
        "recipe.html",
        context,
        request=request,
    ))
예제 #9
0
File: base.py — Project: zartata/datasette
 async def render(self, templates, request, context):
     """Render templates; ?_context=1 dumps the raw context when the
     template_debug config option is enabled."""
     template = self.ds.jinja_env.select_template(templates)
     template_context = dict(context)
     template_context["database_url"] = self.database_url
     template_context["database_color"] = self.database_color
     wants_debug_context = request and request.args.get("_context")
     if wants_debug_context and self.ds.config("template_debug"):
         dumped = json.dumps(template_context, default=repr, indent=4)
         return Response.html(
             "<pre>{}</pre>".format(jinja2.escape(dumped)))
     rendered = await self.ds.render_template(
         template, template_context, request=request)
     return Response.html(rendered)
async def paprika_recipe_route(request, datasette):
    """Fetch one recipe row via the internal JSON API and render it."""
    recipe_id = request.url_vars["recipe_id"]
    r = await datasette.client.get(f'paprika/recipes/{recipe_id}.json')
    row_data = r.json()
    context = dict(zip(row_data["columns"], row_data["rows"][0]))
    return Response.html(await datasette.render_template(
        "recipe.html",
        context,
        request=request,
    ))
예제 #11
0
async def schema_versions(datasette, request):
    """Show the collected schema versions as pretty-printed JSON."""
    data_json = json.dumps(await _schema_versions(datasette), indent=4)
    context = {
        "filename": "schema-versions.json",
        "data_json": data_json,
    }
    return Response.html(
        await datasette.render_template(
            "show_json.html", context, request=request)
    )
예제 #12
0
async def manage_db_group(scope, receive, datasette, request):
    """Manage membership of a database's access group.

    GET lists the users in the group; POST adds a user; DELETE removes one
    (returning 204). The group row is created on first access for databases
    not listed in BLOCKED_DB_ACTIONS.

    Raises:
        Forbidden: when the actor lacks "live-permissions-edit" for the DB.
        AssertionError: when the named database doesn't exist.
    """
    db_name = unquote_plus(request.url_vars["database"])
    if not await datasette.permission_allowed(
        request.actor, "live-permissions-edit", db_name, default=False
    ):
        raise Forbidden("Permission denied")

    db = get_db(datasette)

    # Look up the ID of this database's access group, if it exists.
    group_id = None
    results = db["groups"].rows_where("name=?", [f"DB Access: {db_name}"])
    for row in results:
        group_id = row["id"]
        break

    assert db_name in datasette.databases, "Non-existent database!"

    if not group_id and db_name not in BLOCKED_DB_ACTIONS:
        # Create the group, then re-enter so group_id is picked up above.
        db["groups"].insert({
            "name": f"DB Access: {db_name}",
        }, pk="id", replace=True)
        return await manage_db_group(scope, receive, datasette, request)

    if request.method in ["POST", "DELETE"]:
        formdata = await request.post_vars()
        user_id = formdata["user_id"]

        if request.method == "POST":
            db["group_membership"].insert({
                "group_id": group_id,
                "user_id": user_id,
            }, replace=True)
        else:
            # DELETE — the outer membership check guarantees one of the two,
            # so the previous unreachable NotImplementedError branch is gone.
            db["group_membership"].delete((group_id, user_id))
            return Response.text('', status=204)

    perms_query = """
        select distinct user_id as id, lookup, value, description
        from group_membership join users
        on group_membership.user_id = users.id
        where group_membership.group_id=?
    """
    users = db.execute(perms_query, (group_id,))
    return Response.html(
        await datasette.render_template(
            "database_management.html", {
                "database": db_name,
                "users": users,
            }, request=request
        )
    )
예제 #13
0
 def login_as_root(datasette, request):
     """Sign the visitor in as the root actor.

     Mainly for the latest.datasette.io demo: POST sets the signed
     ds_actor cookie and redirects home; GET shows the sign-in form.
     """
     if request.method == "POST":
         response = Response.redirect("/")
         signed_actor = datasette.sign({"a": {"id": "root"}}, "actor")
         response.set_cookie("ds_actor", signed_actor)
         return response
     form_html = """
         <form action="{}" method="POST">
             <p>
                 <input type="hidden" name="csrftoken" value="{}">
                 <input type="submit" value="Sign in as root user"></p>
         </form>
     """.format(request.path, request.scope["csrftoken"]())
     return Response.html(form_html)
예제 #14
0
 async def render(self, templates, request, context=None):
     """Render the first matching template with view defaults applied."""
     context = context or {}
     template = self.ds.jinja_env.select_template(templates)
     select_templates = []
     for template_name in templates:
         # "*" marks the template that was actually selected.
         prefix = "*" if template_name == template.name else ""
         select_templates.append(f"{prefix}{template_name}")
     template_context = {
         **context,
         "database_color": self.database_color,
         "select_templates": select_templates,
     }
     body = await self.ds.render_template(
         template, template_context, request=request, view_name=self.name)
     return Response.html(body)
예제 #15
0
File: base.py — Project: qls0ulp/datasette
 async def render(self, templates, request, context):
     """Render templates, injecting database URL/color and template list."""
     template = self.ds.jinja_env.select_template(templates)
     select_templates = []
     for template_name in templates:
         marker = "*" if template_name == template.name else ""
         select_templates.append("{}{}".format(marker, template_name))
     template_context = dict(context)
     template_context.update({
         "database_url": self.database_url,
         "database_color": self.database_color,
         "select_templates": select_templates,
     })
     body = await self.ds.render_template(
         template, template_context, request=request, view_name=self.name)
     return Response.html(body)
예제 #16
0
 def render(self, templates, **context):
     """Render the first matching template (synchronous variant)."""
     template = self.ds.jinja_env.select_template(templates)
     select_templates = [
         ("*" if template_name == template.name else "") + template_name
         for template_name in templates
     ]
     # Script blocks contributed by plugins via extra_body_script.
     # pylint: disable=no-member
     body_scripts = [
         jinja2.Markup(script)
         for script in pm.hook.extra_body_script(
             template=template.name,
             database=context.get("database"),
             table=context.get("table"),
             view_name=self.name,
             datasette=self.ds,
         )
     ]
     template_context = dict(context)
     template_context.update({
         "app_css_hash": self.ds.app_css_hash(),
         "select_templates": select_templates,
         "zip": zip,
         "body_scripts": body_scripts,
         "extra_css_urls": self._asset_urls(
             "extra_css_urls", template, context),
         "extra_js_urls": self._asset_urls(
             "extra_js_urls", template, context),
         "format_bytes": format_bytes,
         "database_url": self.database_url,
         "database_color": self.database_color,
     })
     return Response.html(template.render(template_context))
예제 #17
0
async def dashboard_view(request, datasette):
    """Render a full dashboard page after checking SQL permissions.

    Raises:
        NotFound: if the dashboard slug or any referenced database is unknown.
    """
    await check_permission_instance(request, datasette)

    config = datasette.plugin_config("datasette-dashboards") or {}
    slug = urllib.parse.unquote(request.url_vars["slug"])
    try:
        dashboard = config[slug]
    except KeyError:
        raise NotFound(f"Dashboard not found: {slug}")

    # Each distinct database used by a chart must be queryable by the actor.
    # (Set comprehension instead of set([listcomp]).)
    dbs = {
        chart["db"] for chart in dashboard["charts"].values() if "db" in chart
    }
    for db in dbs:
        try:
            database = datasette.get_database(db)
        except KeyError:
            raise NotFound(f"Database does not exist: {db}")
        await check_permission_execute_sql(request, datasette, database)

    options_keys = get_dashboard_filters_keys(request, dashboard)
    query_parameters = get_dashboard_filters(request, options_keys)
    query_string = generate_dashboard_filters_qs(request, options_keys)

    for chart in dashboard["charts"].values():
        fill_chart_query_options(chart, options_keys)

    return Response.html(await datasette.render_template(
        "dashboard_view.html",
        {
            "slug": slug,
            "query_parameters": query_parameters,
            "query_string": query_string,
            "dashboard": dashboard,
        },
    ))
예제 #18
0
async def explorer(datasette, request):
    """Render the tile explorer page for one mbtiles database.

    Reads center/zoom bounds from the database's metadata table, clamps the
    requested zoom into [min_zoom, max_zoom], and computes the current tile
    plus its four compass neighbours.

    Raises:
        NotFound: if db_name is not a detected mbtiles database.
    """
    db_name = request.url_vars["db_name"]
    mbtiles_databases = await detect_mtiles_databases(datasette)
    if db_name not in mbtiles_databases:
        raise NotFound("Not a valid mbtiles database")
    db = datasette.get_database(db_name)
    metadata = {
        row["name"]: row["value"]
        for row in (await db.execute("select name, value from metadata")).rows
    }
    default_latitude = 0
    default_longitude = 0
    default_zoom = 0
    if metadata.get("center") and len(metadata["center"].split(",")) == 3:
        # center is "longitude,latitude,zoom". Convert from strings so the
        # template gets numbers and int() below can't choke on e.g. "4.0".
        lon_s, lat_s, zoom_s = metadata["center"].split(",")
        default_longitude = float(lon_s)
        default_latitude = float(lat_s)
        default_zoom = int(float(zoom_s))
    min_zoom = 0
    max_zoom = 19
    if metadata.get("minzoom"):
        min_zoom = int(metadata["minzoom"])
    if metadata.get("maxzoom"):
        max_zoom = int(metadata["maxzoom"])
    attribution = metadata.get("attribution") or None

    # Provided location data (query string overrides metadata defaults)
    lat = float(request.args.get("lat", default_latitude))
    lon = float(request.args.get("lon", default_longitude))
    zoom = int(request.args.get("z", default_zoom))
    # Clamp into the database's supported zoom range
    if zoom > max_zoom:
        zoom = max_zoom
    if zoom < min_zoom:
        zoom = min_zoom
    x_tile, y_tile = latlon_to_tile_with_adjust(lat, lon, zoom)

    return Response.html(await datasette.render_template(
        "tiles_explorer.html",
        {
            "nojs": request.args.get("nojs") or request.args.get("lat"),
            "metadata": metadata,
            "db_name": db_name,
            "db_path": datasette.urls.database(db_name),
            "default_latitude": default_latitude,
            "default_longitude": default_longitude,
            "default_zoom": default_zoom,
            "min_zoom": min_zoom,
            "max_zoom": max_zoom,
            "attribution": json.dumps(attribution),
            "current_latitude": lat,
            "current_longitude": lon,
            "can_zoom_in": zoom < max_zoom,
            "can_zoom_out": zoom > min_zoom,
            "current_zoom": zoom,
            "current_x": x_tile,
            "current_y": y_tile,
            "compass": {
                "n": tile_to_latlon(x_tile, y_tile - 1, zoom),
                "s": tile_to_latlon(x_tile, y_tile + 1, zoom),
                "e": tile_to_latlon(x_tile + 1, y_tile, zoom),
                "w": tile_to_latlon(x_tile - 1, y_tile, zoom),
            },
        },
    ))
예제 #19
0
 async def csrftoken_form(request, datasette):
     """Render a form page that demonstrates CSRF token usage."""
     body = await datasette.render_template(
         "csrftoken_form.html", request=request)
     return Response.html(body)
예제 #20
0
 async def post(request):
     """Return the CSRF token on GET; echo POSTed vars back as JSON."""
     if request.method != "GET":
         return Response.json(await request.post_vars())
     return Response.html(request.scope["csrftoken"]())
예제 #21
0
async def csv_importer(scope, receive, datasette, request):
    """
    CSV Importer initiates a CSV import using the CLI tool CSVs-to-SQlite.
    Accepts HTTP POST with form data as follows:

    `csv` should contain the CSV file to be imported

    `database` is the name of the database file to be written to. If blank,
    we will choose a name base on the uploaded file name.

    If `xhr` is set to `1` we will assume a JS client is running and this
    endpoint will return JSON (as opposed to rendering a different HTML
    template without `xhr` set to `1`).

    A valid `csrftoken` needs to be provided.

    Any form input starting with "-" are interpreted as arguments to
    the CLI tool. Such arguments are considered single-toggle arguments
    that don't use any parameters, so "--on true" will be interpreted
    as running the tool with "--on".
    """
    if not await datasette.permission_allowed(
        request.actor, "csv-importer", default=False
    ):
        raise Forbidden("Permission denied for csv-importer")

    plugin_config = datasette.plugin_config(
        "datasette-csv-importer"
    ) or {}
    print("plugin_config", plugin_config)

    db = get_status_database(datasette, plugin_config)
    status_table = get_status_table(plugin_config)

    # We need the ds_request to pass to render_template for CSRF tokens
    ds_request = request

    # We use the Starlette request object to handle file uploads
    starlette_request = Request(scope, receive)
    # If we aren't uploading a new file (POST), show uploader screen
    if starlette_request.method != "POST":
        return Response.html(
            await datasette.render_template(
                "csv_importer.html", {}, request=ds_request
            )
        )

    formdata = await starlette_request.form()
    csv = formdata["csv"]

    # csv.file is a SpooledTemporaryFile. csv.filename is the filename
    filename = csv.filename
    basename = os.path.splitext(filename)[0]
    if "database" in formdata and formdata["database"]:
        basename = formdata["database"]

    outfile_db = os.path.join(get_dbpath(plugin_config), f"{basename}.db")

    # Overwriting an existing database requires view-database access,
    # either globally or for that specific database.
    if basename in datasette.databases:
        global_access = await datasette.permission_allowed(
            request.actor, "view-database", default=False
        )
        specific_access = await datasette.permission_allowed(
            request.actor, "view-database", (basename,), default=False
        )
        if not specific_access and not global_access:
            raise Forbidden("view-database access required for existing database!")

    task_id = str(uuid.uuid4())

    def insert_initial_record(conn):
        # Record the import as in-progress so clients can poll its status.
        database = sqlite_utils.Database(conn)
        database[status_table].insert(
            {
                "id": task_id,
                "filename": filename,
                "dbname": basename,
                "started": str(datetime.datetime.utcnow()),
                "completed": None,
                "exitcode": -1,
                "status": "in-progress",
                "message": "Setting up import...",
                "output": None,
            },
            pk="id",
            alter=True,
        )
    await db.execute_write_fn(insert_initial_record, block=True)

    # Arguments that accept a comma-separated list of column names.
    csv_fields = [
        "--primary-key", "--fts", "--index", "--date", "--datetime",
        "--datetime-format"
    ]
    args = []
    for key, value in formdata.items():
        if not key.startswith("-"):
            continue
        # this is a toggle/flag arg with no param
        if value is True or value == "true":
            args.append(key)
            continue
        if not value or value == "false":
            continue
        # we have a columns list field, split it up w/ dupe keys
        # TODO: This screws up when column names have commas in them!
        if "," in value and key in csv_fields:
            # Bug fix: the pattern was r"/,\s*/" (JS-style slashes), which
            # never matched, and the whole unsplit value was appended
            # instead of each piece.
            for v in re.split(r",\s*", value):
                if not v or not v.strip():
                    continue
                args.append(key)
                args.append(v)
            continue
        args.append(key)
        args.append(value)

    def set_status(conn, message):
        # Update the status row's message for progress reporting.
        print("Setting status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "message": message,
            },
        )
        print("Successfully set status!")

    # run the command, capture its output
    def run_cli_import(conn):
        set_status(conn, "Running CSV import...")

        exitcode = -1
        output = None
        message = None
        try:
            with tempfile.NamedTemporaryFile() as temp:
                temp.write(csv.file.read())
                temp.flush()

                args.append(temp.name)
                args.append(outfile_db)

                # run the import command, capturing stdout
                with Capturing() as output:
                    exitcode = command.main(
                        args=args, prog_name="cli", standalone_mode=False
                    )
                    if exitcode is not None:
                        exitcode = int(exitcode)
                    # detect a failure to write DB where tool returns success
                    # code this makes it so we don't have to read the
                    # CLI output to figure out if the command succeeded or not
                    if not os.path.exists(outfile_db) and not exitcode:
                        exitcode = -2
        except Exception as e:
            print("Exception", e)
            exitcode = -2
            message = str(e)

        set_status(conn, "Adding database to internal DB list...")
        # Adds this DB to the internel DBs list
        if basename not in datasette.databases:
            print("Adding database", basename)
            datasette.add_database(
                Database(datasette, path=outfile_db, is_mutable=True),
                name=basename,
            )

        csvspath = get_csvspath(plugin_config)
        if csvspath:
            # Persist the uploaded CSV and the CLI args alongside it so the
            # import can be reproduced later.
            set_status(conn, "Saving CSV to server directory...")
            csv_db_name = args[-1].replace(".db", "")
            csv_table_name = csv_db_name
            if "-t" in formdata:
                csv_table_name = formdata["-t"]
            if "--table" in formdata:
                csv_table_name = formdata["--table"]
            outfile_csv = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.csv"
            )
            outfile_args = os.path.join(
                csvspath, f"{csv_db_name}--{csv_table_name}.json"
            )

            # success! save our configs and CSV
            print("Writing CSV", outfile_csv)
            with open(outfile_csv, "wb") as f:
                csv.file.seek(0)
                f.write(csv.file.read())

            print("Writing args to", outfile_args)
            with open(outfile_args, "w") as f:
                f.write(json.dumps(args, indent=2))

        if get_use_live_metadata(plugin_config):
            print("Running live-config integration...")
            set_status(
                conn, "Running live-config plugin integration..."
            )
            # add the permission table, grant access to current user only
            # this will create the DB if not exists
            print("Opening DB:", outfile_db)
            out_db = sqlite_utils.Database(sqlite3.connect(outfile_db))
            try:
                out_db["__metadata"].get("allow")
            except sqlite_utils.db.NotFoundError:
                # don't overwrite, only create
                out_db["__metadata"].insert({
                    "key": "tables",
                    "value": json.dumps({
                        "__metadata": {
                            "hidden": True
                        }
                    }),
                }, pk="key", alter=True, replace=False, ignore=True)

        if get_use_live_permissions(plugin_config):
            print("Setting live-permissions plugin status...")
            set_status(
                conn,
                "Running live-permissions plugin integration..."
            )
            print("Running set_perms_for_live_permissions with basename:", basename)
            set_perms_for_live_permissions(datasette, request.actor, basename)
            print("set_perms_for_live_permissions complete!")

        if not message:
            message = "Import successful!" if not exitcode else "Failure"

        print("Updating status", message)
        status_database = sqlite_utils.Database(conn)
        status_database[status_table].update(
            task_id,
            {
                "completed": str(datetime.datetime.utcnow()),
                "exitcode": exitcode,
                "status": "completed",
                "message": message,
                "output": "\n".join(output),
            },
        )

    await db.execute_write_fn(run_cli_import)

    if formdata.get("xhr"):
        return Response.json(
            {
                # NOTE(review): this path template looks corrupted — the
                # "filename" placeholder is never interpolated. Confirm the
                # intended URL against the original plugin source.
                "url": datasette.urls.path("/(unknown)".format(
                    filename=quote_plus(filename),
                )),
                "status_database_path": quote_plus(db.name),
                "status_table": quote_plus(status_table),
                "task_id": task_id,
            }
        )

    return Response.html(
        await datasette.render_template(
            "csv_importer_done.html", {
                "filename": filename,
                "task_id": task_id,
            },
        )
    )
예제 #22
0
def test_response_html():
    """Response.html yields a 200 text/html response carrying the body."""
    response = Response.html("Hello from HTML")
    assert response.status == 200
    assert response.body == "Hello from HTML"
    assert response.content_type == "text/html; charset=utf-8"
예제 #23
0
 async def render_message(datasette, request):
     """Render the render_message.html template as an HTML response."""
     rendered = await datasette.render_template(
         "render_message.html", request=request)
     return Response.html(rendered)
예제 #24
0
 def add_message(datasette, request):
     """Queue a demo flash message, then acknowledge with a small page."""
     message_text = "Hello from messages"
     datasette.add_message(request, message_text)
     return Response.html("Added message")
예제 #25
0
 def not_async():
     """A plain (non-async) view returning a static HTML response."""
     body = "This was not async"
     return Response.html(body)
예제 #26
0
async def import_table(request, datasette):
    """Import a table from a remote Datasette instance's JSON API.

    GET renders the import form. POST fetches the first page of rows from
    the submitted URL, creates the table synchronously (so the redirect
    target exists), then streams any remaining pages in a background
    asyncio task, scheduling one write per page so other writers are not
    blocked for the duration of a large import.

    Raises:
        Forbidden: if the actor lacks the "import-table" permission.
    """
    if not await datasette.permission_allowed(
            request.actor, "import-table", default=False):
        raise Forbidden("Permission denied for import-table")

    # Only mutable, non-internal databases are valid import targets.
    mutable_databases = [
        db for db in datasette.databases.values()
        if db.is_mutable and db.name != "_internal"
    ]
    error = None

    if request.method == "POST":
        post_vars = await request.post_vars()
        url = post_vars.get("url")
        try:
            table_name, rows, pks, total, next_page = await load_first_page(url
                                                                            )
        except Exception as e:
            # Surface fetch/parse failures on the form instead of a 500.
            error = str(e)
        else:
            # Single-column pk is passed as a scalar; fall back to rowid
            # when the source table exposes no primary key at all.
            primary_key = (pks[0] if len(pks) == 1 else pks) or "rowid"

            def start_table(conn):
                # Create the table and insert the first page of rows.
                db = sqlite_utils.Database(conn)
                with db.conn:
                    db[table_name].insert_all(rows, pk=primary_key)

            database = datasette.get_database(post_vars.get("database"))
            # block=True: wait for the table to exist before redirecting.
            await database.execute_write_fn(start_table, block=True)

            # This is a bit of a mess. My first implementation of this worked
            # by starting a function on the write thread which fetched each
            # page in turn and wrote them to the database synchronously.
            #
            # Problem: the write thread can only run one function at a time -
            # and for a large number of rows this function blocked anyone
            # else from scheduling a write until it had finished.
            #
            # This more complex version instead runs the paginated HTTP gets
            # in an asyncio task, and has that task schedule a write operation
            # for each individual batch of rows that it receives.

            def do_the_rest(url):
                async def inner_async():
                    nonlocal url

                    def row_writer(rows):
                        # Build a write-thread callable bound to this batch.
                        def inner(conn):
                            db = sqlite_utils.Database(conn)
                            with db.conn:
                                db[table_name].insert_all(rows)

                        return inner

                    # Reuse one HTTP client (and its connection pool) for
                    # every page rather than opening a new client per page.
                    async with httpx.AsyncClient() as client:
                        while url:
                            response = await client.get(url)
                            data = response.json()
                            if data.get("rows"):
                                await database.execute_write_fn(
                                    row_writer(data["rows"]))
                            url = data.get("next_url")

                return inner_async()

            if next_page:
                # Fire-and-forget: the import continues after the redirect.
                asyncio.ensure_future(do_the_rest(next_page))

            return Response.redirect("/{}/{}?_import_expected_rows={}".format(
                database.name, quote_plus(table_name), total))

    return Response.html(await datasette.render_template(
        "datasette_import_table.html",
        {
            "databases": [m.name for m in mutable_databases],
            "error": error,
            "database": request.args.get("database"),
        },
        request=request,
    ))
예제 #27
0
async def view_graphql(request, datasette):
    """Handle GraphQL requests against a Datasette database.

    Serves the GraphiQL UI for browser GET requests, the printed schema
    when ?schema= is passed, and otherwise executes the query (from the
    POST body or query-string args) and returns a JSON result.

    Raises:
        NotFound: if the named database does not exist.
    """
    # CORS preflight: answer immediately without touching the database.
    if request.method == "OPTIONS":
        return Response.text("ok",
                             headers=CORS_HEADERS if datasette.cors else {})

    body = await post_body(request)
    database = request.url_vars.get("database")

    # Validate the database name early; get_database raises KeyError.
    try:
        datasette.get_database(database)
    except KeyError:
        raise NotFound("Database does not exist")

    # A browser GET (no body, accepts HTML) gets the GraphiQL explorer.
    if not body and "text/html" in request.headers.get("accept", ""):
        return Response.html(
            await datasette.render_template("graphiql.html", {
                "database": database,
            },
                                            request=request),
            headers=CORS_HEADERS if datasette.cors else {},
        )

    schema = await schema_for_database_via_cache(datasette, database=database)

    # ?schema= returns the SDL text of the generated schema.
    if request.args.get("schema"):
        return Response.text(print_schema(schema))

    incoming = {}
    if body:
        # JSON POST body: {"query": ..., "variables": ..., "operationName": ...}
        incoming = json.loads(body)
        query = incoming.get("query")
        variables = incoming.get("variables")
        operation_name = incoming.get("operationName")
    else:
        # GET request: same fields arrive as query-string arguments,
        # with variables JSON-encoded in a single parameter.
        query = request.args.get("query")
        variables = request.args.get("variables", "")
        if variables:
            variables = json.loads(variables)
        operation_name = request.args.get("operationName")

    if not query:
        return Response.json(
            {"error": "Missing query"},
            status=400,
            headers=CORS_HEADERS if datasette.cors else {},
        )

    # Per-request execution budget, enforced by the resolvers via context.
    config = datasette.plugin_config("datasette-graphql") or {}
    context = {
        "time_started":
        time.monotonic(),
        "time_limit_ms":
        config.get("time_limit_ms") or DEFAULT_TIME_LIMIT_MS,
        "num_queries_executed":
        0,
        "num_queries_limit":
        config.get("num_queries_limit") or DEFAULT_NUM_QUERIES_LIMIT,
    }

    # NOTE(review): executor=/return_promise= is the graphql-core 2.x API;
    # graphql-core 3.x dropped both arguments — confirm the pinned version.
    result = await graphql(
        schema,
        query,
        operation_name=operation_name,
        variable_values=variables,
        context_value=context,
        executor=AsyncioExecutor(),
        return_promise=True,
    )
    response = {"data": result.data}
    if result.errors:
        response["errors"] = [format_error(error) for error in result.errors]

    # GraphQL convention: errors ride along in the JSON payload; HTTP 500
    # is used here when any error occurred.
    return Response.json(
        response,
        status=200 if not result.errors else 500,
        headers=CORS_HEADERS if datasette.cors else {},
    )
예제 #28
0
async def index(datasette):
    """Render the tiles index page listing MBTiles-capable databases."""
    mbtiles_databases = await detect_mtiles_databases(datasette)
    rendered = await datasette.render_template(
        "tiles_index.html",
        {"mbtiles_databases": mbtiles_databases},
    )
    return Response.html(rendered)
예제 #29
0
async def indieauth_page(request, datasette, status=200, error=None):
    """Render the IndieAuth sign-in page and start the auth flow on POST.

    On POST, canonicalizes and verifies the submitted "me" URL, discovers
    the site's authorization endpoint, then redirects to it with a signed
    state and a PKCE-style verifier stored in a signed cookie. Any
    validation failure falls through to re-render the form with an error.
    """
    from datasette.utils.asgi import Response

    urls = Urls(request, datasette)

    if request.method == "POST":
        while True:  # So I can use 'break'
            post = await request.post_vars()
            me = post.get("me")
            if me:
                me = canonicalize_url(me)

            if not me or not verify_profile_url(me):
                error = "Invalid IndieAuth identifier"
                break

            # Start the auth process
            try:
                me, authorization_endpoint, token_endpoint = await discover_endpoints(
                    me
                )
            except httpx.RequestError as ex:
                error = "Invalid IndieAuth identifier: {}".format(ex)
                break
            if not authorization_endpoint:
                error = "Invalid IndieAuth identifier - no authorization_endpoint found"
                break

            # Build the redirect to the discovered endpoint; state is
            # signed so it can be verified when the callback comes in.
            authorization_url, state, verifier = build_authorization_url(
                authorization_endpoint=authorization_endpoint,
                client_id=urls.client_id,
                redirect_uri=urls.redirect_uri,
                me=me,
                signing_function=lambda x: datasette.sign(x, DATASETTE_INDIEAUTH_STATE),
            )
            response = Response.redirect(authorization_url)
            # Persist the verifier ("v") and canonical me ("m") in a signed
            # cookie so the callback handler can complete the exchange.
            response.set_cookie(
                "ds_indieauth",
                datasette.sign(
                    {
                        "v": verifier,
                        "m": me,
                    },
                    DATASETTE_INDIEAUTH_COOKIE,
                ),
            )
            return response

    # GET, or a POST that broke out of the loop with an error set.
    return Response.html(
        await datasette.render_template(
            "indieauth.html",
            {
                "error": error,
                "title": datasette.metadata("title") or "Datasette",
                "absolute_instance_url": datasette.absolute_url(
                    request, datasette.urls.instance()
                ),
            },
            request=request,
        ),
        status=status,
    )