async def return_specific_status(request, code):
    if code in [404, 503]:
        abort(code)
    raise NotFound('Not Found')

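# Usage sketch (not part of the example above): wiring the handler into a Sanic app.
# This assumes an older Sanic release where `abort` is importable from sanic.exceptions;
# the app name, route path and port below are illustrative only.
from sanic import Sanic

app = Sanic("status_demo")
# Reuse the handler defined above; <code:int> converts the path segment to an int.
app.add_route(return_specific_status, "/status/<code:int>")

if __name__ == "__main__":
    # GET /status/503 -> 503 response; any other code falls through to NotFound (404).
    app.run(host="0.0.0.0", port=8000)
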
async def data(self, request, name, hash, table, default_labels=False, _next=None, _size=None):
    canned_query = self.ds.get_canned_query(name, table)
    if canned_query is not None:
        return await self.custom_sql(
            request, name, hash, canned_query["sql"],
            metadata=canned_query, editable=False, canned_query=table,
        )
    is_view = bool(await self.ds.get_view_definition(name, table))
    info = self.ds.inspect()
    table_info = info[name]["tables"].get(table) or {}
    if not is_view and not table_info:
        raise NotFound("Table not found: {}".format(table))
    pks = table_info.get("primary_keys") or []
    use_rowid = not pks and not is_view
    if use_rowid:
        select = "rowid, *"
        order_by = "rowid"
        order_by_pks = "rowid"
    else:
        select = "*"
        order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
        order_by = order_by_pks
    if is_view:
        order_by = ""
    # We roll our own query_string decoder because by default Sanic
    # drops anything with an empty value e.g. ?name__exact=
    args = RequestParameters(
        urllib.parse.parse_qs(request.query_string, keep_blank_values=True))
    # Special args start with _ and do not contain a __
    # That's so if there is a column that starts with _
    # it can still be queried using ?_col__exact=blah
    special_args = {}
    special_args_lists = {}
    other_args = {}
    for key, value in args.items():
        if key.startswith("_") and "__" not in key:
            special_args[key] = value[0]
            special_args_lists[key] = value
        else:
            other_args[key] = value[0]
    # Handle ?_filter_column and redirect, if present
    redirect_params = filters_should_redirect(special_args)
    if redirect_params:
        return self.redirect(
            request,
            path_with_added_args(request, redirect_params),
            forward_querystring=False,
        )
    # Spot ?_sort_by_desc and redirect to _sort_desc=(_sort)
    if "_sort_by_desc" in special_args:
        return self.redirect(
            request,
            path_with_added_args(request, {
                "_sort_desc": special_args.get("_sort"),
                "_sort_by_desc": None,
                "_sort": None,
            }),
            forward_querystring=False,
        )
    table_metadata = self.table_metadata(name, table)
    units = table_metadata.get("units", {})
    filters = Filters(sorted(other_args.items()), units, ureg)
    where_clauses, params = filters.build_where_clauses()
    # _search support:
    fts_table = info[name]["tables"].get(table, {}).get("fts_table")
    search_args = dict(
        pair for pair in special_args.items() if pair[0].startswith("_search"))
    search_descriptions = []
    search = ""
    if fts_table and search_args:
        if "_search" in search_args:
            # Simple ?_search=xxx
            search = search_args["_search"]
            where_clauses.append(
                "rowid in (select rowid from {fts_table} where {fts_table} match :search)".format(
                    fts_table=escape_sqlite(fts_table)))
            search_descriptions.append('search matches "{}"'.format(search))
            params["search"] = search
        else:
            # More complex: search against specific columns
            valid_columns = set(info[name]["tables"][fts_table]["columns"])
            for i, (key, search_text) in enumerate(search_args.items()):
                search_col = key.split("_search_", 1)[1]
                if search_col not in valid_columns:
                    raise DatasetteError("Cannot search by that column", status=400)
                where_clauses.append(
                    "rowid in (select rowid from {fts_table} where {search_col} match :search_{i})".format(
                        fts_table=escape_sqlite(fts_table),
                        search_col=escape_sqlite(search_col),
                        i=i))
                search_descriptions.append(
                    'search column "{}" matches "{}"'.format(search_col, search_text))
                params["search_{}".format(i)] = search_text
    table_rows_count = None
    sortable_columns = set()
    if not is_view:
        table_rows_count = table_info["count"]
        sortable_columns = self.sortable_columns_for_table(name, table, use_rowid)
    # Allow for custom sort order
    sort = special_args.get("_sort")
    if sort:
        if sort not in sortable_columns:
            raise DatasetteError("Cannot sort table by {}".format(sort))
        order_by = escape_sqlite(sort)
    sort_desc = special_args.get("_sort_desc")
    if sort_desc:
        if sort_desc not in sortable_columns:
            raise DatasetteError("Cannot sort table by {}".format(sort_desc))
        if sort:
            raise DatasetteError("Cannot use _sort and _sort_desc at the same time")
        order_by = "{} desc".format(escape_sqlite(sort_desc))
    from_sql = "from {table_name} {where}".format(
        table_name=escape_sqlite(table),
        where=("where {} ".format(" and ".join(where_clauses))) if where_clauses else "",
    )
    # Store current params and where_clauses for later:
    from_sql_params = dict(**params)
    from_sql_where_clauses = where_clauses[:]
    count_sql = "select count(*) {}".format(from_sql)
    _next = _next or special_args.get("_next")
    offset = ""
    if _next:
        if is_view:
            # _next is an offset
            offset = " offset {}".format(int(_next))
        else:
            components = urlsafe_components(_next)
            # If a sort order is applied, the first of these is the sort value
            if sort or sort_desc:
                sort_value = components[0]
                # Special case for if non-urlencoded first token was $null
                if _next.split(",")[0] == "$null":
                    sort_value = None
                components = components[1:]
            # Figure out the SQL for next-based-on-primary-key first
            next_by_pk_clauses = []
            if use_rowid:
                next_by_pk_clauses.append("rowid > :p{}".format(len(params)))
                params["p{}".format(len(params))] = components[0]
            else:
                # Apply the tie-breaker based on primary keys
                if len(components) == len(pks):
                    param_len = len(params)
                    next_by_pk_clauses.append(compound_keys_after_sql(pks, param_len))
                    for i, pk_value in enumerate(components):
                        params["p{}".format(param_len + i)] = pk_value
            # Now add the sort SQL, which may incorporate next_by_pk_clauses
            if sort or sort_desc:
                if sort_value is None:
                    if sort_desc:
                        # Just items where column is null ordered by pk
                        where_clauses.append(
                            "({column} is null and {next_clauses})".format(
                                column=escape_sqlite(sort_desc),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            ))
                    else:
                        where_clauses.append(
                            "({column} is not null or ({column} is null and {next_clauses}))".format(
                                column=escape_sqlite(sort),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            ))
                else:
                    where_clauses.append(
                        "({column} {op} :p{p}{extra_desc_only} or ({column} = :p{p} and {next_clauses}))".format(
                            column=escape_sqlite(sort or sort_desc),
                            op=">" if sort else "<",
                            p=len(params),
                            extra_desc_only="" if sort else " or {column2} is null".format(
                                column2=escape_sqlite(sort or sort_desc)),
                            next_clauses=" and ".join(next_by_pk_clauses),
                        ))
                    params["p{}".format(len(params))] = sort_value
                order_by = "{}, {}".format(order_by, order_by_pks)
            else:
                where_clauses.extend(next_by_pk_clauses)
    where_clause = ""
    if where_clauses:
        where_clause = "where {} ".format(" and ".join(where_clauses))
    if order_by:
        order_by = "order by {} ".format(order_by)
    # _group_count=col1&_group_count=col2
    group_count = special_args_lists.get("_group_count") or []
    if group_count:
        sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
            group_cols=", ".join('"{}"'.format(group_count_col) for group_count_col in group_count),
            table_name=escape_sqlite(table),
            where=where_clause,
        )
        return await self.custom_sql(request, name, hash, sql, editable=True)
    extra_args = {}
    # Handle ?_size=500
    page_size = _size or request.raw_args.get("_size")
    if page_size:
        if page_size == "max":
            page_size = self.ds.max_returned_rows
        try:
            page_size = int(page_size)
            if page_size < 0:
                raise ValueError
        except ValueError:
            raise DatasetteError("_size must be a positive integer", status=400)
        if page_size > self.ds.max_returned_rows:
            raise DatasetteError(
                "_size must be <= {}".format(self.ds.max_returned_rows), status=400)
        extra_args["page_size"] = page_size
    else:
        page_size = self.ds.page_size
    sql = "select {select} from {table_name} {where}{order_by}limit {limit}{offset}".format(
        select=select,
        table_name=escape_sqlite(table),
        where=where_clause,
        order_by=order_by,
        limit=page_size + 1,
        offset=offset,
    )
    if request.raw_args.get("_timelimit"):
        extra_args["custom_time_limit"] = int(request.raw_args["_timelimit"])
    results = await self.ds.execute(name, sql, params, truncate=True, **extra_args)
    # facets support
    facet_size = self.ds.config["default_facet_size"]
    metadata_facets = table_metadata.get("facets", [])
    facets = metadata_facets[:]
    if request.args.get("_facet") and not self.ds.config["allow_facet"]:
        raise DatasetteError("_facet= is not allowed", status=400)
    try:
        facets.extend(request.args["_facet"])
    except KeyError:
        pass
    facet_results = {}
    facets_timed_out = []
    for column in facets:
        if _next:
            continue
        facet_sql = """
            select {col} as value, count(*) as count
            {from_sql} {and_or_where} {col} is not null
            group by {col} order by count desc limit {limit}
        """.format(
            col=escape_sqlite(column),
            from_sql=from_sql,
            and_or_where='and' if from_sql_where_clauses else 'where',
            limit=facet_size + 1,
        )
        try:
            facet_rows_results = await self.ds.execute(
                name, facet_sql, params, truncate=False,
                custom_time_limit=self.ds.config["facet_time_limit_ms"],
            )
            facet_results_values = []
            facet_results[column] = {
                "name": column,
                "results": facet_results_values,
                "truncated": len(facet_rows_results) > facet_size,
            }
            facet_rows = facet_rows_results.rows[:facet_size]
            # Attempt to expand foreign keys into labels
            values = [row["value"] for row in facet_rows]
            expanded = (await self.expand_foreign_keys(name, table, column, values))
            for row in facet_rows:
                selected = str(other_args.get(column)) == str(row["value"])
                if selected:
                    toggle_path = path_with_removed_args(
                        request, {column: str(row["value"])})
                else:
                    toggle_path = path_with_added_args(
                        request, {column: row["value"]})
                facet_results_values.append({
                    "value": row["value"],
                    "label": expanded.get((column, row["value"]), row["value"]),
                    "count": row["count"],
                    "toggle_url": self.absolute_url(request, toggle_path),
                    "selected": selected,
                })
        except InterruptedError:
            facets_timed_out.append(column)
    columns = [r[0] for r in results.description]
    rows = list(results.rows)
    filter_columns = columns[:]
    if use_rowid and filter_columns[0] == "rowid":
        filter_columns = filter_columns[1:]
    # Expand labeled columns if requested
    expanded_columns = []
    expandable_columns = self.expandable_columns(name, table)
    columns_to_expand = None
    try:
        all_labels = value_as_boolean(special_args.get("_labels", ""))
    except ValueError:
        all_labels = default_labels
    # Check for explicit _label=
    if "_label" in request.args:
        columns_to_expand = request.args["_label"]
    if columns_to_expand is None and all_labels:
        # expand all columns with foreign keys
        columns_to_expand = [fk["column"] for fk, _ in expandable_columns]
    if columns_to_expand:
        expanded_labels = {}
        for fk, label_column in expandable_columns:
            column = fk["column"]
            if column not in columns_to_expand:
                continue
            expanded_columns.append(column)
            # Gather the values
            column_index = columns.index(column)
            values = [row[column_index] for row in rows]
            # Expand them
            expanded_labels.update(
                await self.expand_foreign_keys(name, table, column, values))
        if expanded_labels:
            # Rewrite the rows
            new_rows = []
            for row in rows:
                new_row = CustomRow(columns)
                for column in row.keys():
                    value = row[column]
                    if (column, value) in expanded_labels:
                        new_row[column] = {
                            'value': value,
                            'label': expanded_labels[(column, value)]
                        }
                    else:
                        new_row[column] = value
                new_rows.append(new_row)
            rows = new_rows
    # Pagination next link
    next_value = None
    next_url = None
    if len(rows) > page_size and page_size > 0:
        if is_view:
            next_value = int(_next or 0) + page_size
        else:
            next_value = path_from_row_pks(rows[-2], pks, use_rowid)
        # If there's a sort or sort_desc, add that value as a prefix
        if (sort or sort_desc) and not is_view:
            prefix = rows[-2][sort or sort_desc]
            if prefix is None:
                prefix = "$null"
            else:
                prefix = urllib.parse.quote_plus(str(prefix))
            next_value = "{},{}".format(prefix, next_value)
            added_args = {"_next": next_value}
            if sort:
                added_args["_sort"] = sort
            else:
                added_args["_sort_desc"] = sort_desc
        else:
            added_args = {"_next": next_value}
        next_url = self.absolute_url(
            request, path_with_replaced_args(request, added_args))
        rows = rows[:page_size]
    # Number of filtered rows in whole set:
    filtered_table_rows_count = None
    if count_sql:
        try:
            count_rows = list(await self.ds.execute(name, count_sql, from_sql_params))
            filtered_table_rows_count = count_rows[0][0]
        except InterruptedError:
            pass
    # Detect suggested facets
    suggested_facets = []
    if self.ds.config["suggest_facets"] and self.ds.config["allow_facet"]:
        for facet_column in columns:
            if facet_column in facets:
                continue
            if _next:
                continue
            if not self.ds.config["suggest_facets"]:
                continue
            suggested_facet_sql = '''
                select distinct {column} {from_sql}
                {and_or_where} {column} is not null
                limit {limit}
            '''.format(
                column=escape_sqlite(facet_column),
                from_sql=from_sql,
                and_or_where='and' if from_sql_where_clauses else 'where',
                limit=facet_size + 1)
            distinct_values = None
            try:
                distinct_values = await self.ds.execute(
                    name, suggested_facet_sql, from_sql_params, truncate=False,
                    custom_time_limit=self.ds.config["facet_suggest_time_limit_ms"],
                )
                num_distinct_values = len(distinct_values)
                if (num_distinct_values and num_distinct_values > 1
                        and num_distinct_values <= facet_size
                        and num_distinct_values < filtered_table_rows_count):
                    suggested_facets.append({
                        'name': facet_column,
                        'toggle_url': self.absolute_url(
                            request,
                            path_with_added_args(request, {"_facet": facet_column})),
                    })
            except InterruptedError:
                pass
    # human_description_en combines filters AND search, if provided
    human_description_en = filters.human_description_en(extra=search_descriptions)
    if sort or sort_desc:
        sorted_by = "sorted by {}{}".format(
            (sort or sort_desc), " descending" if sort_desc else "")
        human_description_en = " ".join(
            [b for b in [human_description_en, sorted_by] if b])

    async def extra_template():
        display_columns, display_rows = await self.display_columns_and_rows(
            name, table, results.description, rows,
            link_column=not is_view,
            truncate_cells=self.ds.config["truncate_cells_html"],
        )
        metadata = self.ds.metadata.get("databases", {}).get(name, {}).get(
            "tables", {}).get(table, {})
        self.ds.update_with_inherited_metadata(metadata)
        return {
            "database_hash": hash,
            "supports_search": bool(fts_table),
            "search": search or "",
            "use_rowid": use_rowid,
            "filters": filters,
            "display_columns": display_columns,
            "filter_columns": filter_columns,
            "display_rows": display_rows,
            "facets_timed_out": facets_timed_out,
            "sorted_facet_results": sorted(
                facet_results.values(),
                key=lambda f: (len(f["results"]), f["name"]),
                reverse=True),
            "facet_hideable": lambda facet: facet not in metadata_facets,
            "is_sortable": any(c["sortable"] for c in display_columns),
            "path_with_replaced_args": path_with_replaced_args,
            "path_with_removed_args": path_with_removed_args,
            "append_querystring": append_querystring,
            "request": request,
            "sort": sort,
            "sort_desc": sort_desc,
            "disable_sort": is_view,
            "custom_rows_and_columns_templates": [
                "_rows_and_columns-{}-{}.html".format(
                    to_css_class(name), to_css_class(table)),
                "_rows_and_columns-table-{}-{}.html".format(
                    to_css_class(name), to_css_class(table)),
                "_rows_and_columns.html",
            ],
            "metadata": metadata,
            "view_definition": await self.ds.get_view_definition(name, table),
            "table_definition": await self.ds.get_table_definition(name, table),
        }

    return {
        "database": name,
        "table": table,
        "is_view": is_view,
        "human_description_en": human_description_en,
        "rows": rows[:page_size],
        "truncated": results.truncated,
        "table_rows_count": table_rows_count,
        "filtered_table_rows_count": filtered_table_rows_count,
        "expanded_columns": expanded_columns,
        "expandable_columns": expandable_columns,
        "columns": columns,
        "primary_keys": pks,
        "units": units,
        "query": {"sql": sql, "params": params},
        "facet_results": facet_results,
        "suggested_facets": suggested_facets,
        "next": next_value and str(next_value) or None,
        "next_url": next_url,
    }, extra_template, (
        "table-{}-{}.html".format(to_css_class(name), to_css_class(table)),
        "table.html",
    )

def handler_3(request):
    raise NotFound("OK")

async def trials_raw(request):
    global TRIALS
    n = request.args.get('n')
    tid = request.args.get('tid')
    label = request.args.get('label')
    if all(x is None for x in (n, tid, label)):
        n = 0
    if 'refresh' in request.args:
        _refresh_trials()
    if len(TRIALS.trials) == 0:
        redirect(f"/status")
    tr = None
    trials = list(TRIALS.trials)
    trials.sort(key=lambda x: x['exp_key'], reverse=True)
    if 'allgens' in request.args:
        all_gens = True
    else:
        all_gens = False
        trials = [x for x in trials if x['exp_key'] == trials[0]['exp_key']]
    losses = []
    for tr in trials:
        loss = min(list(zip(*tr['result'].get('validation_stats', [(0, np.inf, 0, 0)])))[1])
        # loss = tr['result'].get('loss', np.inf)
        if tr['state'] != 2:  # Not Done
            loss = np.inf
        if loss in losses:
            losses.append(np.inf)
        else:
            losses.append(loss)
    idx_list = np.argsort([x if x is not None else np.inf for x in losses])
    max_idx = np.argwhere(np.isfinite(np.array(losses)[idx_list])).flatten().max()
    if n is not None:
        n = int(n)
        if n > max_idx:
            return redirect(f"/best-trials/{max_idx}")
        tr = trials[idx_list[n]]
    elif tid is not None:
        tid = int(tid)
        for t in trials:
            if t['tid'] == tid:
                tr = t
                break
    elif label is not None:
        for t in trials:
            params = hyperopt.space_eval(
                SEARCH_SPACE,
                {k: v[0] for k, v in t['misc'].get('vals', {}).items()})
            hsh = hashlib.sha1('hyperopt'.encode())
            hsh.update(repr(sorted(params.items())).encode())
            if label == 'hyperopt_' + hsh.hexdigest()[:12]:
                tr = t
                break
    if tr is None:
        return NotFound()
    return text(pformat(tr))

async def get_current_best(request):
    global TRIALS, TRIALS_REFRESHED
    n = request.args.get('n')
    tid = request.args.get('tid')
    label = request.args.get('label')
    gen = request.args.get('gen')
    if all(x is None for x in (n, tid, label)):
        n = 0
    if 'refresh' in request.args:
        TRIALS.refresh()
        TRIALS_REFRESHED = datetime.now()
    if len(TRIALS.trials) == 0:
        redirect(f"/status")
    tr = None
    trials = list(TRIALS.trials)
    trials.sort(key=lambda x: x['exp_key'], reverse=True)
    if gen is not None:
        gen = int(gen)
        trials = []
        for t in TRIALS.trials:
            try:
                epoch = int(t['result'].get('training_loss_hist', [(-1, np.inf)])[-1][0] + 1e-8)
            except:
                epoch = -1
            if epoch == gen and t['exp_key'] == f'covid-{gen}':
                trials.append(t)
    all_gens = False
    trials = [x for x in trials if x['exp_key'] == trials[0]['exp_key']]
    losses = []
    for tr in trials:
        loss = min(list(zip(*tr['result'].get('validation_stats', [(0, np.inf, 0, 0)])))[1])
        # loss = tr['result'].get('loss', np.inf)
        if tr['state'] != 2:  # Not Done
            loss = np.inf
        if loss in losses:
            losses.append(np.inf)
        else:
            losses.append(loss)
    idx_list = np.argsort([x if x is not None else np.inf for x in losses])
    max_idx = np.argwhere(np.isfinite(np.array(losses)[idx_list])).flatten().max()
    button_suffix = '' if gen is None else f'gen={gen}'
    if n is not None:
        n = int(n)
        if n > max_idx:
            return redirect(f"/best-trials/?n={max_idx}&{button_suffix}")
        tr = trials[idx_list[n]]
    elif tid is not None:
        tid = int(tid)
        for t in trials:
            if t['tid'] == tid:
                tr = t
                break
    elif label is not None:
        for t in trials:
            params = hyperopt.space_eval(
                SEARCH_SPACE,
                {k: v[0] for k, v in t['misc'].get('vals', {}).items()})
            hsh = hashlib.sha1('hyperopt'.encode())
            hsh.update(repr(sorted(params.items())).encode())
            if label == 'hyperopt_' + hsh.hexdigest()[:12]:
                tr = t
                break
    if tr is None:
        return NotFound()
    for i, idx in enumerate(idx_list):
        if trials[idx] == tr:
            n = i
            break
    if n is None:
        n = 0
    if 'training_loss_hist' in tr['result']:
        fig = get_performance_plots(tr['result']['training_loss_hist'],
                                    tr['result']['validation_stats'],
                                    tr['result'].get('learning_rates'))
        img = fig_to_base64(fig, close=True).decode('utf-8')
        stats = get_performance_stats(tr['result']['validation_stats'])
    else:
        img = ''
        stats = {'epoch': [0]}
    params = hyperopt.space_eval(
        SEARCH_SPACE,
        {k: v[0] for k, v in tr['misc'].get('vals', {}).items()})
    hsh = hashlib.sha1('hyperopt'.encode())
    hsh.update(repr(sorted(params.items())).encode())
    label = 'hyperopt_' + hsh.hexdigest()[:12]
    return html(f"""
    <html>
    <head><meta name="viewport" content="width=device-width, initial-scale=1"/></head>
    <body>
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css">
    <link href="https://stackpath.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet"
          integrity="sha384-wvfXpqpZZVQGK6TAh5PVlGOfQNHSoD2xbE+QkPxCAFlNEevoEH3Sl0sibVcOQVnN" crossorigin="anonymous">
    <div>
    <h3>Run Details: #{n}/{max_idx}</h3>
    <div class="col-md-12 mb-4 mt-4 text-center">
        {make_button(f"/status", "server", text="Job Status")}
        <br><br>
        {make_button(f"/best-trials/?n=0&{button_suffix}", "fast-backward", n == 0)}
        {make_button(f"/best-trials/?n={n-1}&{button_suffix}", "chevron-left", n == 0)}
        {make_button(f"/best-trials/?tid={tr['tid']}&refresh=True&{button_suffix}", "refresh")}
        {make_button(f"/best-trials/?n={n+1}&{button_suffix}", "chevron-right", n == max_idx)}
        {make_button(f"/best-trials/?n={max_idx}&{button_suffix}", "fast-forward", n == max_idx)}
    </div>
    <i>Data refreshed {humanize.naturaltime(TRIALS_REFRESHED)}</i><br/>
    <br>
    <table class="table table-striped table-sm w-auto ml-1">
    <tbody>
        <tr><th>Pass</th><td>{tr["exp_key"]}</td></tr>
        <tr><th>Label</th><td>{label}</td></tr>
        <tr><th>Task ID</th><td>{tr['tid']}</td></tr>
        <tr><th>Raw Loss</th><td>{tr['result'].get('validation_stats', [(0, np.inf, 0, 0)])[-1][1]:0.4f}</td></tr>
        <tr><th>Adj Loss</th><td>{'{0:0.4f}'.format(tr['result'].get('loss', np.inf))}</td></tr>
        <tr><th>Epoch</th><td>{stats['epoch'][-1]:0.0f}</td></tr>
    </tbody>
    </table>
    <div class="col-md-12 mb-4 mt-4 text-center">
        {make_button(f"/best-trials/raw?tid=" + str(tr['tid']), "code", text="Full JSON Details")}
        {make_button(f"/delete-trial/{tr['_id']}", 'trash', text='Delete Trial')}
    </div>
    <img src="data:image/png;base64, {img}" class="img-fluid"/>
    <table class="table table-striped table-sm w-auto ml-1">
    <thead class="thead-light"><th>stat</th><th>{'</th><th>'.join(MODE_NAMES)}</th></thead>
    <tbody>
    {''.join('<tr><th>' + k + '</th><td>' + '</td><td>'.join('{0:0.3f}'.format(v) for v in vals[-1]) + '</td></tr>' for k, vals in stats.items() if k != 'epoch')}
    </tbody></table>
    <h4>Parameters</h4>
    <table class="table table-sm w-auto ml-1">
    <tr>{'</tr><tr>'.join('<th>{0}</th><td>{1}</td>'.format(k, v) for k, v in params.items())}</tr>
    </table>
    </body></html>
    """)

async def first_or_404(self, *args, **kwargs):
    rv = await self.first(*args, **kwargs)
    if rv is None:
        raise NotFound('No such data')
    return rv

async def view_get(self, request, database, hash, correct_hash_provided, **kwargs):
    _format, kwargs = await self.get_format(request, database, kwargs)
    if _format == "csv":
        return await self.as_csv(request, database, hash, **kwargs)
    if _format is None:
        # HTML views default to expanding all foreign key labels
        kwargs["default_labels"] = True
    extra_template_data = {}
    start = time.time()
    status_code = 200
    templates = []
    try:
        response_or_template_contexts = await self.data(request, database, hash, **kwargs)
        if isinstance(response_or_template_contexts, response.HTTPResponse):
            return response_or_template_contexts
        else:
            data, extra_template_data, templates = response_or_template_contexts
    except QueryInterrupted:
        raise DatasetteError(
            """
            SQL query took too long. The time limit is controlled by the
            <a href="https://datasette.readthedocs.io/en/stable/config.html#sql-time-limit-ms">sql_time_limit_ms</a>
            configuration option.
            """,
            title="SQL Interrupted",
            status=400,
            messagge_is_html=True,
        )
    except (sqlite3.OperationalError, InvalidSql) as e:
        raise DatasetteError(str(e), title="Invalid SQL", status=400)
    except (sqlite3.OperationalError) as e:
        raise DatasetteError(str(e))
    except DatasetteError:
        raise
    end = time.time()
    data["query_ms"] = (end - start) * 1000
    for key in ("source", "source_url", "license", "license_url"):
        value = self.ds.metadata(key)
        if value:
            data[key] = value
    # Special case for .jsono extension - redirect to _shape=objects
    if _format == "jsono":
        return self.redirect(
            request,
            path_with_added_args(
                request,
                {"_shape": "objects"},
                path=request.path.rsplit(".jsono", 1)[0] + ".json",
            ),
            forward_querystring=False,
        )
    if _format in self.ds.renderers.keys():
        # Dispatch request to the correct output format renderer
        # (CSV is not handled here due to streaming)
        result = self.ds.renderers[_format](request.args, data, self.name)
        if result is None:
            raise NotFound("No data")
        response_args = {
            "content_type": result.get("content_type", "text/plain"),
            "status": result.get("status_code", 200),
        }
        if type(result.get("body")) == bytes:
            response_args["body_bytes"] = result.get("body")
        else:
            response_args["body"] = result.get("body")
        r = response.HTTPResponse(**response_args)
    else:
        extras = {}
        if callable(extra_template_data):
            extras = extra_template_data()
            if asyncio.iscoroutine(extras):
                extras = await extras
        else:
            extras = extra_template_data
        url_labels_extra = {}
        if data.get("expandable_columns"):
            url_labels_extra = {"_labels": "on"}
        renderers = {
            key: path_with_format(request, key, {**url_labels_extra})
            for key in self.ds.renderers.keys()
        }
        url_csv_args = {"_size": "max", **url_labels_extra}
        url_csv = path_with_format(request, "csv", url_csv_args)
        url_csv_path = url_csv.split("?")[0]
        context = {
            **data,
            **extras,
            **{
                "renderers": renderers,
                "url_csv": url_csv,
                "url_csv_path": url_csv_path,
                "url_csv_hidden_args": [
                    (key, value)
                    for key, value in urllib.parse.parse_qsl(request.query_string)
                    if key not in ("_labels", "_facet", "_size")
                ] + [("_size", "max")],
                "datasette_version": __version__,
                "config": self.ds.config_dict(),
            },
        }
        if "metadata" not in context:
            context["metadata"] = self.ds.metadata
        r = self.render(templates, **context)
        r.status = status_code
    ttl = request.args.get("_ttl", None)
    if ttl is None or not ttl.isdigit():
        if correct_hash_provided:
            ttl = self.ds.config("default_cache_ttl_hashed")
        else:
            ttl = self.ds.config("default_cache_ttl")
    return self.set_response_headers(r, ttl)

async def get(self, request):
    posts = await db_api.get_posts_by_search(request.json)
    if posts:
        return posts
    raise NotFound(f"no found posts by search: {request.json.get('search')}")

async def get(self, request, post_id):
    posts = await db_api.get_post_by_id(post_id)
    if posts:
        return posts
    raise NotFound(f"no post with {post_id} id")

async def get(self, request, section_id):
    posts = await db_api.get_all_posts(section_id)
    if posts:
        return posts
    raise NotFound(f"no posts in section {section_id}")

async def get(self, request, section_id, page_number):
    page_number = 1 if page_number in [0, 1] else page_number
    posts = await db_api.get_posts_by_page(section_id, page_number)
    if posts:
        return posts
    raise NotFound(f"no posts on {page_number} page")

def _get(self, url, method, host):
    """Get a request handler based on the URL of the request, or raises an error.
    Internal method for caching.

    :param url: request URL
    :param method: request method
    :return: handler, arguments, keyword arguments
    """
    # Strip escape sequences from the URL: encoded sequences are replaced with unicode
    url = unquote(host + url)
    # Check against known static routes
    # Route = namedtuple(
    #     "Route", ["handler", "methods", "pattern", "parameters", "name", "uri"]
    # )
    route = self.routes_static.get(url)
    method_not_supported = MethodNotSupported(
        f"Method {method} not allowed for URL {url}",
        method=method,
        allowed_methods=self.get_supported_methods(url),
    )
    if route:
        if route.methods and method not in route.methods:
            raise method_not_supported
        match = route.pattern.match(url)
    else:
        route_found = False
        # Move on to testing all regex routes
        for route in self.routes_dynamic[url_hash(url)]:
            match = route.pattern.match(url)
            # route_found stays True once any pattern has matched (TODO: not sure)
            route_found |= match is not None
            # Do early method checking
            if match and method in route.methods:
                # Skip the for-else below; no need to check routes_always_check
                break
        else:  # runs only when the loop above did not break
            # Lastly, check against all regex routes that cannot be hashed
            for route in self.routes_always_check:
                match = route.pattern.match(url)
                route_found |= match is not None
                # Do early method checking
                if match and method in route.methods:
                    # Skip the for-else below
                    break
            else:
                # Route was found but the methods didn't match
                if route_found:
                    raise method_not_supported
                raise NotFound(f"Requested URL {url} not found")
    # Zip match.groups(1) with route.parameters into tuples,
    # https://www.runoob.com/python/python-func-zip.html
    # match.groups(1) returns the regex groups of the match; e.g. if the match is
    # 123abc!@#, group 1 is 123, group 2 is abc and group 3 is !@#
    kwargs = {
        p.name: p.cast(value)
        for value, p in zip(match.groups(1), route.parameters)
    }
    route_handler = route.handler
    if hasattr(route_handler, "handlers"):
        route_handler = route_handler.handlers[method]
    return route_handler, [], kwargs, route.uri, route.name

async def get_user_by_id_method(request, user_id):
    user = await db_api.get_user_by_id(UserById({'user_id': user_id}).user_id)
    if user:
        return json(user, status=200)
    raise NotFound(f'User with id {user_id} not found')

def raise_not_found_exception(model, **kwargs):
    message = "Unable to find {}".format(model.__name__)
    if kwargs:
        message += " with " + ", ".join(
            "{!s}={!r}".format(key, val) for (key, val) in kwargs.items())
    raise NotFound(message)

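# A minimal sketch of how the helper above behaves; the `User` class and the
# keyword arguments are stand-ins invented for illustration only.
from sanic.exceptions import NotFound

class User:
    pass

try:
    raise_not_found_exception(User, id=42, active=True)
except NotFound as exc:
    # Prints something like: Unable to find User with id=42, active=True
    print(exc)
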
async def data(self, request, database, hash, table, default_labels=False, _next=None, _size=None):
    canned_query = self.ds.get_canned_query(database, table)
    if canned_query is not None:
        return await self.custom_sql(
            request, database, hash, canned_query["sql"],
            metadata=canned_query, editable=False, canned_query=table,
        )
    db = self.ds.databases[database]
    is_view = bool(await db.get_view_definition(table))
    table_exists = bool(await db.table_exists(table))
    if not is_view and not table_exists:
        raise NotFound("Table not found: {}".format(table))
    pks = await db.primary_keys(table)
    use_rowid = not pks and not is_view
    if use_rowid:
        select = "rowid, *"
        order_by = "rowid"
        order_by_pks = "rowid"
    else:
        select = "*"
        order_by_pks = ", ".join([escape_sqlite(pk) for pk in pks])
        order_by = order_by_pks
    if is_view:
        order_by = ""
    # We roll our own query_string decoder because by default Sanic
    # drops anything with an empty value e.g. ?name__exact=
    args = RequestParameters(
        urllib.parse.parse_qs(request.query_string, keep_blank_values=True))
    # Special args start with _ and do not contain a __
    # That's so if there is a column that starts with _
    # it can still be queried using ?_col__exact=blah
    special_args = {}
    special_args_lists = {}
    other_args = []
    for key, value in args.items():
        if key.startswith("_") and "__" not in key:
            special_args[key] = value[0]
            special_args_lists[key] = value
        else:
            for v in value:
                other_args.append((key, v))
    # Handle ?_filter_column and redirect, if present
    redirect_params = filters_should_redirect(special_args)
    if redirect_params:
        return self.redirect(
            request,
            path_with_added_args(request, redirect_params),
            forward_querystring=False,
        )
    # Spot ?_sort_by_desc and redirect to _sort_desc=(_sort)
    if "_sort_by_desc" in special_args:
        return self.redirect(
            request,
            path_with_added_args(request, {
                "_sort_desc": special_args.get("_sort"),
                "_sort_by_desc": None,
                "_sort": None,
            }),
            forward_querystring=False,
        )
    table_metadata = self.ds.table_metadata(database, table)
    units = table_metadata.get("units", {})
    filters = Filters(sorted(other_args), units, ureg)
    where_clauses, params = filters.build_where_clauses(table)
    extra_wheres_for_ui = []
    # Add _where= from querystring
    if "_where" in request.args:
        if not self.ds.config("allow_sql"):
            raise DatasetteError("_where= is not allowed", status=400)
        else:
            where_clauses.extend(request.args["_where"])
            extra_wheres_for_ui = [{
                "text": text,
                "remove_url": path_with_removed_args(request, {"_where": text}),
            } for text in request.args["_where"]]
    # Support for ?_through={table, column, value}
    extra_human_descriptions = []
    if "_through" in request.args:
        for through in request.args["_through"]:
            through_data = json.loads(through)
            through_table = through_data["table"]
            other_column = through_data["column"]
            value = through_data["value"]
            outgoing_foreign_keys = await db.get_outbound_foreign_keys(through_table)
            try:
                fk_to_us = [
                    fk for fk in outgoing_foreign_keys if fk["other_table"] == table
                ][0]
            except IndexError:
                raise DatasetteError(
                    "Invalid _through - could not find corresponding foreign key")
            param = "p{}".format(len(params))
            where_clauses.append(
                "{our_pk} in (select {our_column} from {through_table} where {other_column} = :{param})".format(
                    through_table=escape_sqlite(through_table),
                    our_pk=escape_sqlite(fk_to_us["other_column"]),
                    our_column=escape_sqlite(fk_to_us["column"]),
                    other_column=escape_sqlite(other_column),
                    param=param,
                ))
            params[param] = value
            extra_human_descriptions.append(
                '{}.{} = "{}"'.format(through_table, other_column, value))
    # _search support:
    fts_table = special_args.get("_fts_table")
    fts_table = fts_table or table_metadata.get("fts_table")
    fts_table = fts_table or await db.fts_table(table)
    fts_pk = special_args.get("_fts_pk", table_metadata.get("fts_pk", "rowid"))
    search_args = dict(
        pair for pair in special_args.items() if pair[0].startswith("_search"))
    search = ""
    if fts_table and search_args:
        if "_search" in search_args:
            # Simple ?_search=xxx
            search = search_args["_search"]
            where_clauses.append(
                "{fts_pk} in (select rowid from {fts_table} where {fts_table} match :search)".format(
                    fts_table=escape_sqlite(fts_table), fts_pk=escape_sqlite(fts_pk)))
            extra_human_descriptions.append('search matches "{}"'.format(search))
            params["search"] = search
        else:
            # More complex: search against specific columns
            for i, (key, search_text) in enumerate(search_args.items()):
                search_col = key.split("_search_", 1)[1]
                if search_col not in await db.table_columns(fts_table):
                    raise DatasetteError("Cannot search by that column", status=400)
                where_clauses.append(
                    "rowid in (select rowid from {fts_table} where {search_col} match :search_{i})".format(
                        fts_table=escape_sqlite(fts_table),
                        search_col=escape_sqlite(search_col),
                        i=i,
                    ))
                extra_human_descriptions.append(
                    'search column "{}" matches "{}"'.format(search_col, search_text))
                params["search_{}".format(i)] = search_text
    sortable_columns = set()
    sortable_columns = await self.sortable_columns_for_table(database, table, use_rowid)
    # Allow for custom sort order
    sort = special_args.get("_sort")
    if sort:
        if sort not in sortable_columns:
            raise DatasetteError("Cannot sort table by {}".format(sort))
        order_by = escape_sqlite(sort)
    sort_desc = special_args.get("_sort_desc")
    if sort_desc:
        if sort_desc not in sortable_columns:
            raise DatasetteError("Cannot sort table by {}".format(sort_desc))
        if sort:
            raise DatasetteError("Cannot use _sort and _sort_desc at the same time")
        order_by = "{} desc".format(escape_sqlite(sort_desc))
    from_sql = "from {table_name} {where}".format(
        table_name=escape_sqlite(table),
        where=("where {} ".format(" and ".join(where_clauses))) if where_clauses else "",
    )
    # Copy of params so we can mutate them later:
    from_sql_params = dict(**params)
    count_sql = "select count(*) {}".format(from_sql)
    _next = _next or special_args.get("_next")
    offset = ""
    if _next:
        if is_view:
            # _next is an offset
            offset = " offset {}".format(int(_next))
        else:
            components = urlsafe_components(_next)
            # If a sort order is applied, the first of these is the sort value
            if sort or sort_desc:
                sort_value = components[0]
                # Special case for if non-urlencoded first token was $null
                if _next.split(",")[0] == "$null":
                    sort_value = None
                components = components[1:]
            # Figure out the SQL for next-based-on-primary-key first
            next_by_pk_clauses = []
            if use_rowid:
                next_by_pk_clauses.append("rowid > :p{}".format(len(params)))
                params["p{}".format(len(params))] = components[0]
            else:
                # Apply the tie-breaker based on primary keys
                if len(components) == len(pks):
                    param_len = len(params)
                    next_by_pk_clauses.append(compound_keys_after_sql(pks, param_len))
                    for i, pk_value in enumerate(components):
                        params["p{}".format(param_len + i)] = pk_value
            # Now add the sort SQL, which may incorporate next_by_pk_clauses
            if sort or sort_desc:
                if sort_value is None:
                    if sort_desc:
                        # Just items where column is null ordered by pk
                        where_clauses.append(
                            "({column} is null and {next_clauses})".format(
                                column=escape_sqlite(sort_desc),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            ))
                    else:
                        where_clauses.append(
                            "({column} is not null or ({column} is null and {next_clauses}))".format(
                                column=escape_sqlite(sort),
                                next_clauses=" and ".join(next_by_pk_clauses),
                            ))
                else:
                    where_clauses.append(
                        "({column} {op} :p{p}{extra_desc_only} or ({column} = :p{p} and {next_clauses}))".format(
                            column=escape_sqlite(sort or sort_desc),
                            op=">" if sort else "<",
                            p=len(params),
                            extra_desc_only="" if sort else " or {column2} is null".format(
                                column2=escape_sqlite(sort or sort_desc)),
                            next_clauses=" and ".join(next_by_pk_clauses),
                        ))
                    params["p{}".format(len(params))] = sort_value
                order_by = "{}, {}".format(order_by, order_by_pks)
            else:
                where_clauses.extend(next_by_pk_clauses)
    where_clause = ""
    if where_clauses:
        where_clause = "where {} ".format(" and ".join(where_clauses))
    if order_by:
        order_by = "order by {} ".format(order_by)
    # _group_count=col1&_group_count=col2
    group_count = special_args_lists.get("_group_count") or []
    if group_count:
        sql = 'select {group_cols}, count(*) as "count" from {table_name} {where} group by {group_cols} order by "count" desc limit 100'.format(
            group_cols=", ".join('"{}"'.format(group_count_col) for group_count_col in group_count),
            table_name=escape_sqlite(table),
            where=where_clause,
        )
        return await self.custom_sql(request, database, hash, sql, editable=True)
    extra_args = {}
    # Handle ?_size=500
    page_size = _size or request.raw_args.get("_size")
    if page_size:
        if page_size == "max":
            page_size = self.ds.max_returned_rows
        try:
            page_size = int(page_size)
            if page_size < 0:
                raise ValueError
        except ValueError:
            raise DatasetteError("_size must be a positive integer", status=400)
        if page_size > self.ds.max_returned_rows:
            raise DatasetteError(
                "_size must be <= {}".format(self.ds.max_returned_rows), status=400)
        extra_args["page_size"] = page_size
    else:
        page_size = self.ds.page_size
    sql_no_limit = "select {select} from {table_name} {where}{order_by}".format(
        select=select,
        table_name=escape_sqlite(table),
        where=where_clause,
        order_by=order_by,
    )
    sql = "{sql_no_limit} limit {limit}{offset}".format(
        sql_no_limit=sql_no_limit.rstrip(), limit=page_size + 1, offset=offset)
    if request.raw_args.get("_timelimit"):
        extra_args["custom_time_limit"] = int(request.raw_args["_timelimit"])
    results = await self.ds.execute(database, sql, params, truncate=True, **extra_args)
    # Number of filtered rows in whole set:
    filtered_table_rows_count = None
    if count_sql:
        try:
            count_rows = list(await self.ds.execute(database, count_sql, from_sql_params))
            filtered_table_rows_count = count_rows[0][0]
        except QueryInterrupted:
            pass
    # facets support
    if not self.ds.config("allow_facet") and any(
            arg.startswith("_facet") for arg in request.args):
        raise DatasetteError("_facet= is not allowed", status=400)
    # pylint: disable=no-member
    facet_classes = list(
        itertools.chain.from_iterable(pm.hook.register_facet_classes()))
    facet_results = {}
    facets_timed_out = []
    facet_instances = []
    for klass in facet_classes:
        facet_instances.append(
            klass(
                self.ds,
                request,
                database,
                sql=sql_no_limit,
                params=params,
                table=table,
                metadata=table_metadata,
                row_count=filtered_table_rows_count,
            ))
    for facet in facet_instances:
        instance_facet_results, instance_facets_timed_out = (await facet.facet_results())
        facet_results.update(instance_facet_results)
        facets_timed_out.extend(instance_facets_timed_out)
    # Figure out columns and rows for the query
    columns = [r[0] for r in results.description]
    rows = list(results.rows)
    filter_columns = columns[:]
    if use_rowid and filter_columns[0] == "rowid":
        filter_columns = filter_columns[1:]
    # Expand labeled columns if requested
    expanded_columns = []
    expandable_columns = await self.expandable_columns(database, table)
    columns_to_expand = None
    try:
        all_labels = value_as_boolean(special_args.get("_labels", ""))
    except ValueError:
        all_labels = default_labels
    # Check for explicit _label=
    if "_label" in request.args:
        columns_to_expand = request.args["_label"]
    if columns_to_expand is None and all_labels:
        # expand all columns with foreign keys
        columns_to_expand = [fk["column"] for fk, _ in expandable_columns]
    if columns_to_expand:
        expanded_labels = {}
        for fk, _ in expandable_columns:
            column = fk["column"]
            if column not in columns_to_expand:
                continue
            expanded_columns.append(column)
            # Gather the values
            column_index = columns.index(column)
            values = [row[column_index] for row in rows]
            # Expand them
            expanded_labels.update(
                await self.ds.expand_foreign_keys(database, table, column, values))
        if expanded_labels:
            # Rewrite the rows
            new_rows = []
            for row in rows:
                new_row = CustomRow(columns)
                for column in row.keys():
                    value = row[column]
                    if (column, value) in expanded_labels:
                        new_row[column] = {
                            "value": value,
                            "label": expanded_labels[(column, value)],
                        }
                    else:
                        new_row[column] = value
                new_rows.append(new_row)
            rows = new_rows
    # Pagination next link
    next_value = None
    next_url = None
    if len(rows) > page_size and page_size > 0:
        if is_view:
            next_value = int(_next or 0) + page_size
        else:
            next_value = path_from_row_pks(rows[-2], pks, use_rowid)
        # If there's a sort or sort_desc, add that value as a prefix
        if (sort or sort_desc) and not is_view:
            prefix = rows[-2][sort or sort_desc]
            if isinstance(prefix, dict) and "value" in prefix:
                prefix = prefix["value"]
            if prefix is None:
                prefix = "$null"
            else:
                prefix = urllib.parse.quote_plus(str(prefix))
            next_value = "{},{}".format(prefix, next_value)
            added_args = {"_next": next_value}
            if sort:
                added_args["_sort"] = sort
            else:
                added_args["_sort_desc"] = sort_desc
        else:
            added_args = {"_next": next_value}
        next_url = self.ds.absolute_url(
            request, path_with_replaced_args(request, added_args))
        rows = rows[:page_size]
    # Detect suggested facets
    suggested_facets = []
    if (self.ds.config("suggest_facets")
            and self.ds.config("allow_facet")
            and not _next):
        for facet in facet_instances:
            suggested_facets.extend(await facet.suggest())
    # human_description_en combines filters AND search, if provided
    human_description_en = filters.human_description_en(extra=extra_human_descriptions)
    if sort or sort_desc:
        sorted_by = "sorted by {}{}".format(
            (sort or sort_desc), " descending" if sort_desc else "")
        human_description_en = " ".join(
            [b for b in [human_description_en, sorted_by] if b])

    async def extra_template():
        display_columns, display_rows = await self.display_columns_and_rows(
            database, table, results.description, rows,
            link_column=not is_view,
            truncate_cells=self.ds.config("truncate_cells_html"),
        )
        metadata = ((self.ds.metadata("databases") or {}).get(database, {})
                    .get("tables", {}).get(table, {}))
        self.ds.update_with_inherited_metadata(metadata)
        form_hidden_args = []
        for arg in ("_fts_table", "_fts_pk"):
            if arg in special_args:
                form_hidden_args.append((arg, special_args[arg]))
        return {
            "supports_search": bool(fts_table),
            "search": search or "",
            "use_rowid": use_rowid,
            "filters": filters,
            "display_columns": display_columns,
            "filter_columns": filter_columns,
            "display_rows": display_rows,
            "facets_timed_out": facets_timed_out,
            "sorted_facet_results": sorted(
                facet_results.values(),
                key=lambda f: (len(f["results"]), f["name"]),
                reverse=True,
            ),
            "extra_wheres_for_ui": extra_wheres_for_ui,
            "form_hidden_args": form_hidden_args,
            "is_sortable": any(c["sortable"] for c in display_columns),
            "path_with_replaced_args": path_with_replaced_args,
            "path_with_removed_args": path_with_removed_args,
            "append_querystring": append_querystring,
            "request": request,
            "sort": sort,
            "sort_desc": sort_desc,
            "disable_sort": is_view,
            "custom_rows_and_columns_templates": [
                "_rows_and_columns-{}-{}.html".format(
                    to_css_class(database), to_css_class(table)),
                "_rows_and_columns-table-{}-{}.html".format(
                    to_css_class(database), to_css_class(table)),
                "_rows_and_columns.html",
            ],
            "metadata": metadata,
            "view_definition": await db.get_view_definition(table),
            "table_definition": await db.get_table_definition(table),
        }

    return (
        {
            "database": database,
            "table": table,
            "is_view": is_view,
            "human_description_en": human_description_en,
            "rows": rows[:page_size],
            "truncated": results.truncated,
            "filtered_table_rows_count": filtered_table_rows_count,
            "expanded_columns": expanded_columns,
            "expandable_columns": expandable_columns,
            "columns": columns,
            "primary_keys": pks,
            "units": units,
            "query": {"sql": sql, "params": params},
            "facet_results": facet_results,
            "suggested_facets": suggested_facets,
            "next": next_value and str(next_value) or None,
            "next_url": next_url,
        },
        extra_template,
        (
            "table-{}-{}.html".format(to_css_class(database), to_css_class(table)),
            "table.html",
        ),
    )

async def put(self, request, post_id):
    post = await db_api.put_post(request.json, post_id)
    if post:
        return post
    raise NotFound(f"no post with {post_id} id")

async def get_or_404(cls, *args, **kwargs):
    # noinspection PyUnresolvedReferences
    rv = await cls.get(*args, **kwargs)
    if rv is None:
        raise NotFound('{} is not found'.format(cls.__name__))
    return rv

async def delete(self, request, post_id):
    res = await db_api.delete_post(post_id)
    if res:
        return {"id": post_id}
    raise NotFound(f"no post with {post_id} id")

async def resolve_db_name(self, request, db_name, **kwargs):
    hash = None
    name = None
    if "-" in db_name:
        # Might be name-and-hash, or might just be
        # a name with a hyphen in it
        name, hash = db_name.rsplit("-", 1)
        if name not in self.ds.databases:
            # Try the whole name
            name = db_name
            hash = None
    else:
        name = db_name
    # Verify the hash
    try:
        db = self.ds.databases[name]
    except KeyError:
        raise NotFound("Database not found: {}".format(name))
    expected = "000"
    if db.hash is not None:
        expected = db.hash[:HASH_LENGTH]
    correct_hash_provided = expected == hash
    if not correct_hash_provided:
        if "table_and_format" in kwargs:

            async def async_table_exists(t):
                return await db.table_exists(t)

            table, _format = await resolve_table_and_format(
                table_and_format=urllib.parse.unquote_plus(kwargs["table_and_format"]),
                table_exists=async_table_exists,
                allowed_formats=self.ds.renderers.keys(),
            )
            kwargs["table"] = table
            if _format:
                kwargs["as_format"] = ".{}".format(_format)
        elif "table" in kwargs:
            kwargs["table"] = urllib.parse.unquote_plus(kwargs["table"])
        should_redirect = "/{}-{}".format(name, expected)
        if "table" in kwargs:
            should_redirect += "/" + urllib.parse.quote_plus(kwargs["table"])
        if "pk_path" in kwargs:
            should_redirect += "/" + kwargs["pk_path"]
        if "as_format" in kwargs:
            should_redirect += kwargs["as_format"]
        if "as_db" in kwargs:
            should_redirect += kwargs["as_db"]
        if (
            (self.ds.config("hash_urls") or "_hash" in request.args)
            and
            # Redirect only if database is immutable
            not self.ds.databases[name].is_mutable
        ):
            return name, expected, correct_hash_provided, should_redirect
    return name, expected, correct_hash_provided, None

async def data(self, request, name, hash, table, pk_path):
    table = urllib.parse.unquote_plus(table)
    pk_values = urlsafe_components(pk_path)
    info = self.ds.inspect()[name]
    table_info = info['tables'].get(table) or {}
    pks = table_info.get('primary_keys') or []
    use_rowid = not pks
    select = '*'
    if use_rowid:
        select = 'rowid, *'
        pks = ['rowid']
    wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)]
    sql = 'select {} from "{}" where {}'.format(select, table, ' AND '.join(wheres))
    params = {}
    for i, pk_value in enumerate(pk_values):
        params['p{}'.format(i)] = pk_value
    # rows, truncated, description = await self.execute(name, sql, params, truncate=True)
    rows, truncated, description = await self.execute(name, sql, params, truncate=True)
    columns = [r[0] for r in description]
    rows = list(rows)
    if not rows:
        raise NotFound('Record not found: {}'.format(pk_values))

    async def template_data():
        display_columns, display_rows = await self.display_columns_and_rows(
            name, table, description, rows,
            link_column=False, expand_foreign_keys=True)
        for column in display_columns:
            column['sortable'] = False
        return {
            'database_hash': hash,
            'foreign_key_tables': await self.foreign_key_tables(name, table, pk_values),
            'display_columns': display_columns,
            'display_rows': display_rows,
            'custom_rows_and_columns_templates': [
                '_rows_and_columns-{}-{}.html'.format(
                    to_css_class(name), to_css_class(table)),
                '_rows_and_columns-row-{}-{}.html'.format(
                    to_css_class(name), to_css_class(table)),
                '_rows_and_columns.html',
            ],
            'metadata': self.ds.metadata.get('databases', {}).get(name, {}).get(
                'tables', {}).get(table, {}),
        }

    data = {
        'database': name,
        'table': table,
        'rows': rows,
        'columns': columns,
        'primary_keys': pks,
        'primary_key_values': pk_values,
    }
    if 'foreign_key_tables' in (request.raw_args.get('_extras') or '').split(','):
        data['foreign_key_tables'] = await self.foreign_key_tables(name, table, pk_values)
    return data, template_data, (
        'row-{}-{}.html'.format(to_css_class(name), to_css_class(table)),
        'row.html')

async def get_training_state(request, run_id):
    logger.info(f"Looking for \"./training_state/{run_id}__state.pkl.gz\"")
    if os.path.exists(f"./training_state/{run_id}__state.pkl.gz"):
        return await file_stream(f"./training_state/{run_id}__state.pkl.gz",
                                 filename=f"{run_id}__state.pkl.gz")
    raise NotFound("No state exists for that id")

async def not_found(request):
    raise NotFound("Not found")

async def get_local_state(request, label):
    if os.path.exists(f"./outputs/{label}_performance.png"):
        return await file(f"./outputs/{label}_performance.png")
    raise NotFound("No state exists for that id")

async def fetch_entity_by_id(conn, entity, id):
    result_proxy = await conn.execute(entity.select().where(entity.c.id == id))
    row_proxy = await result_proxy.fetchone()
    if row_proxy is None:
        raise NotFound('Not found')
    return row_proxy

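# Usage sketch for the helper above, assuming an aiopg.sa engine and a SQLAlchemy
# core table; the `users` table definition and DSN below are illustrative assumptions.
import sqlalchemy as sa
from aiopg.sa import create_engine

metadata = sa.MetaData()
users = sa.Table(
    "users", metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String(255)),
)

async def show_user(user_id):
    async with create_engine(dsn="dbname=demo user=demo") as engine:
        async with engine.acquire() as conn:
            # Raises sanic.exceptions.NotFound when no row matches user_id
            row = await fetch_entity_by_id(conn, users, user_id)
            print(dict(row))
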
async def get_mapa(request, id):
    data = mapa.get_mapa(id)
    if data is None:
        raise NotFound("Mapa '%s' not found" % id)
    return json(data)

async def authentication(request):
    if not utils.is_key_match(request.url):
        raise NotFound("URL not found or key not recognised.")

async def data(self, request, name, hash, table, pk_path, default_labels=False):
    pk_values = urlsafe_components(pk_path)
    info = self.ds.inspect()[name]
    table_info = info["tables"].get(table) or {}
    pks = table_info.get("primary_keys") or []
    use_rowid = not pks
    select = "*"
    if use_rowid:
        select = "rowid, *"
        pks = ["rowid"]
    wheres = ['"{}"=:p{}'.format(pk, i) for i, pk in enumerate(pks)]
    sql = 'select {} from {} where {}'.format(
        select, escape_sqlite(table), " AND ".join(wheres))
    params = {}
    for i, pk_value in enumerate(pk_values):
        params["p{}".format(i)] = pk_value
    results = await self.ds.execute(name, sql, params, truncate=True)
    columns = [r[0] for r in results.description]
    rows = list(results.rows)
    if not rows:
        raise NotFound("Record not found: {}".format(pk_values))

    async def template_data():
        display_columns, display_rows = await self.display_columns_and_rows(
            name, table, results.description, rows,
            link_column=False,
            truncate_cells=0,
        )
        for column in display_columns:
            column["sortable"] = False
        return {
            "database_hash": hash,
            "foreign_key_tables": await self.foreign_key_tables(name, table, pk_values),
            "display_columns": display_columns,
            "display_rows": display_rows,
            "custom_rows_and_columns_templates": [
                "_rows_and_columns-{}-{}.html".format(
                    to_css_class(name), to_css_class(table)),
                "_rows_and_columns-row-{}-{}.html".format(
                    to_css_class(name), to_css_class(table)),
                "_rows_and_columns.html",
            ],
            "metadata": self.ds.metadata.get("databases", {}).get(name, {}).get(
                "tables", {}).get(table, {}),
        }

    data = {
        "database": name,
        "table": table,
        "rows": rows,
        "columns": columns,
        "primary_keys": pks,
        "primary_key_values": pk_values,
        "units": self.table_metadata(name, table).get("units", {}),
    }
    if "foreign_key_tables" in (request.raw_args.get("_extras") or "").split(","):
        data["foreign_key_tables"] = await self.foreign_key_tables(name, table, pk_values)
    return data, template_data, (
        "row-{}-{}.html".format(to_css_class(name), to_css_class(table)),
        "row.html")

def get_email(id):
    try:
        return mail.Email.objects(address=id).get()
    except (mail.Email.DoesNotExist, KeyError):
        raise NotFound('could not find email with address %s' % (id))
    return email

async def test_acl_async_abort_404(request):
    raise NotFound("")

def __init__(self, message, error_id=None):
    NotFound.__init__(self, message)
    SynseError.__init__(self, message, error_id)