def test_resolve_table_and_format(table_and_format, expected_table, expected_format):
    actual_table, actual_format = utils.resolve_table_and_format(
        table_and_format, table_exists
    )
    assert expected_table == actual_table
    assert expected_format == actual_format
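# Illustrative sketch, not part of the original source: the behaviour the test
# above exercises, assuming resolve_table_and_format() only strips a trailing
# .csv/.json style suffix when the full name does not match an existing table.
# The table names used here are made up for the example.
def _demo_resolve_table_and_format():
    exists = {"mytable", "table.csv"}.__contains__
    assert utils.resolve_table_and_format("mytable", exists) == ("mytable", None)
    assert utils.resolve_table_and_format("mytable.csv", exists) == ("mytable", "csv")
    # The full name matches a real table, so ".csv" is not treated as a format
    assert utils.resolve_table_and_format("table.csv", exists) == ("table.csv", None)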
def resolve_db_name(self, db_name, **kwargs):
    databases = self.ds.inspect()
    hash = None
    name = None
    if "-" in db_name:
        # Might be name-and-hash, or might just be
        # a name with a hyphen in it
        name, hash = db_name.rsplit("-", 1)
        if name not in databases:
            # Try the whole name
            name = db_name
            hash = None
    else:
        name = db_name
    # Verify the hash
    try:
        info = databases[name]
    except KeyError:
        raise NotFound("Database not found: {}".format(name))
    expected = info["hash"][:HASH_LENGTH]
    if expected != hash:
        if "table_and_format" in kwargs:
            table, _format = resolve_table_and_format(
                table_and_format=urllib.parse.unquote_plus(
                    kwargs["table_and_format"]
                ),
                table_exists=lambda t: self.ds.table_exists(name, t)
            )
            kwargs["table"] = table
            if _format:
                kwargs["as_format"] = ".{}".format(_format)
        elif "table" in kwargs:
            kwargs["table"] = urllib.parse.unquote_plus(
                kwargs["table"]
            )
        should_redirect = "/{}-{}".format(name, expected)
        if "table" in kwargs:
            should_redirect += "/" + urllib.parse.quote_plus(
                kwargs["table"]
            )
        if "pk_path" in kwargs:
            should_redirect += "/" + kwargs["pk_path"]
        if "as_format" in kwargs:
            should_redirect += kwargs["as_format"]
        if "as_db" in kwargs:
            should_redirect += kwargs["as_db"]
        return name, expected, should_redirect
    return name, expected, None
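# Illustrative sketch, not part of the original source: the URL convention that
# resolve_db_name() above parses, where the database path component is either
# "name" or "name-hash" with the hash truncated to HASH_LENGTH characters.
# _demo_db_path is a hypothetical inverse helper, shown only to make the
# redirect target built above easier to follow.
def _demo_db_path(name, full_hash, table=None):
    path = "/{}-{}".format(name, full_hash[:HASH_LENGTH])
    if table:
        path += "/" + urllib.parse.quote_plus(table)
    return path

# e.g. _demo_db_path("fixtures", "abcdef1234567890", "fact table")
# gives "/fixtures-abcdef1/fact+table" if HASH_LENGTH is 7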
async def view_get(self, request, name, hash, **kwargs):
    # If ?_format= is provided, use that as the format
    _format = request.args.get("_format", None)
    if not _format:
        _format = (kwargs.pop("as_format", None) or "").lstrip(".")
    if "table_and_format" in kwargs:
        table, _ext_format = resolve_table_and_format(
            table_and_format=urllib.parse.unquote_plus(
                kwargs["table_and_format"]
            ),
            table_exists=lambda t: self.ds.table_exists(name, t)
        )
        _format = _format or _ext_format
        kwargs["table"] = table
        del kwargs["table_and_format"]
    elif "table" in kwargs:
        kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"])

    if _format == "csv":
        return await self.as_csv(request, name, hash, **kwargs)

    if _format is None:
        # HTML views default to expanding all foreign key labels
        kwargs["default_labels"] = True

    extra_template_data = {}
    start = time.time()
    status_code = 200
    templates = []
    try:
        response_or_template_contexts = await self.data(
            request, name, hash, **kwargs
        )
        if isinstance(response_or_template_contexts, response.HTTPResponse):
            return response_or_template_contexts
        else:
            data, extra_template_data, templates = response_or_template_contexts
    except InterruptedError as e:
        raise DatasetteError("""
            SQL query took too long. The time limit is controlled by the
            <a href="https://datasette.readthedocs.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a>
            configuration option.
        """, title="SQL Interrupted", status=400, messagge_is_html=True)
    except (sqlite3.OperationalError, InvalidSql) as e:
        raise DatasetteError(str(e), title="Invalid SQL", status=400)
    except sqlite3.OperationalError as e:
        raise DatasetteError(str(e))
    except DatasetteError:
        raise
    end = time.time()
    data["query_ms"] = (end - start) * 1000
    for key in ("source", "source_url", "license", "license_url"):
        value = self.ds.metadata.get(key)
        if value:
            data[key] = value

    if _format in ("json", "jsono"):
        # Special case for .jsono extension - redirect to _shape=objects
        if _format == "jsono":
            return self.redirect(
                request,
                path_with_added_args(
                    request,
                    {"_shape": "objects"},
                    path=request.path.rsplit(".jsono", 1)[0] + ".json",
                ),
                forward_querystring=False,
            )

        # Handle the _json= parameter which may modify data["rows"]
        json_cols = []
        if "_json" in request.args:
            json_cols = request.args["_json"]
        if json_cols and "rows" in data and "columns" in data:
            data["rows"] = convert_specific_columns_to_json(
                data["rows"], data["columns"], json_cols,
            )

        # Unless _json_infinity=1 was requested, replace infinity with None
        if "rows" in data and not value_as_boolean(
            request.args.get("_json_infinity", "0")
        ):
            data["rows"] = [remove_infinites(row) for row in data["rows"]]

        # Deal with the _shape option
        shape = request.args.get("_shape", "arrays")
        if shape == "arrayfirst":
            data = [row[0] for row in data["rows"]]
        elif shape in ("objects", "object", "array"):
            columns = data.get("columns")
            rows = data.get("rows")
            if rows and columns:
                data["rows"] = [dict(zip(columns, row)) for row in rows]
            if shape == "object":
                error = None
                if "primary_keys" not in data:
                    error = "_shape=object is only available on tables"
                else:
                    pks = data["primary_keys"]
                    if not pks:
                        error = "_shape=object not available for tables with no primary keys"
                    else:
                        object_rows = {}
                        for row in data["rows"]:
                            pk_string = path_from_row_pks(row, pks, not pks)
                            object_rows[pk_string] = row
                        data = object_rows
                if error:
                    data = {
                        "ok": False,
                        "error": error,
                        "database": name,
                        "database_hash": hash,
                    }
            elif shape == "array":
                data = data["rows"]
        elif shape == "arrays":
            pass
        else:
            status_code = 400
            data = {
                "ok": False,
                "error": "Invalid _shape: {}".format(shape),
                "status": 400,
                "title": None,
            }
        headers = {}
        if self.ds.cors:
            headers["Access-Control-Allow-Origin"] = "*"
        r = response.HTTPResponse(
            json.dumps(data, cls=CustomJSONEncoder),
            status=status_code,
            content_type="application/json",
            headers=headers,
        )
    else:
        extras = {}
        if callable(extra_template_data):
            extras = extra_template_data()
            if asyncio.iscoroutine(extras):
                extras = await extras
        else:
            extras = extra_template_data
        url_labels_extra = {}
        if data.get("expandable_columns"):
            url_labels_extra = {"_labels": "on"}
        url_csv_args = {"_size": "max", **url_labels_extra}
        url_csv = path_with_format(request, "csv", url_csv_args)
        url_csv_path = url_csv.split("?")[0]
        context = {
            **data,
            **extras,
            **{
                "url_json": path_with_format(request, "json", {
                    **url_labels_extra,
                }),
                "url_csv": url_csv,
                "url_csv_path": url_csv_path,
                "url_csv_args": url_csv_args,
                "extra_css_urls": self.ds.extra_css_urls(),
                "extra_js_urls": self.ds.extra_js_urls(),
                "datasette_version": __version__,
                "config": self.ds.config_dict(),
            }
        }
        if "metadata" not in context:
            context["metadata"] = self.ds.metadata
        r = self.render(templates, **context)
        r.status = status_code

    # Set far-future cache expiry
    if self.ds.cache_headers:
        ttl = request.args.get("_ttl", None)
        if ttl is None or not ttl.isdigit():
            ttl = self.ds.config("default_cache_ttl")
        else:
            ttl = int(ttl)
        if ttl == 0:
            ttl_header = "no-cache"
        else:
            ttl_header = "max-age={}".format(ttl)
        r.headers["Cache-Control"] = ttl_header
    r.headers["Referrer-Policy"] = "no-referrer"
    return r