def explorer_view(provider=None, dataset=None, series=None):
    """Render the explorer page, pre-selecting a provider and/or dataset.

    http://127.0.0.1:8081/views/explorer/dataset/insee-cna-2005-ere-a88
    http://127.0.0.1:8081/views/explorer/insee
    http://127.0.0.1:8081/views/explorer/insee/insee-cna-2005-ere-a88
    http://127.0.0.1:8081/views/explorer
    http://127.0.0.1:8081/views
    """
    if not dataset and not provider:
        # Nothing in the URL: fall back to the site-wide defaults.
        provider = current_app.config.get('DEFAULT_PROVIDER', None)
        dataset = current_app.config.get('DEFAULT_DATASET', None)

    if dataset:
        doc = queries.get_dataset(dataset)
        # Derive the provider slug from the dataset's provider name.
        provider = doc["provider_name"].lower()
    elif provider:
        provider_doc = queries.get_provider(
            provider, {"slug": True, "name": True, "enable": True})
        # Pick any enabled dataset of this provider as initial selection.
        dataset_doc = queries.col_datasets().find_one(
            {"provider_name": provider_doc["name"], "enable": True},
            {"slug": True})
        # FIX: find_one() returns None when the provider has no enabled
        # dataset - the original crashed with TypeError on ["slug"].
        if not dataset_doc:
            abort(404)
        dataset = dataset_doc["slug"]

    ctx = {
        "selectedProvider": provider,
        "selectedDataset": dataset,
    }

    return render_template("explorer.html", **ctx)
def ajax_datasets_list(provider):
    """Return the enabled datasets of *provider* as a JSON list.

    TODO: covered query provider_name + enable + dataset_code + name + slug ???
    TODO: partial index filter on enable = True ?
    """
    provider_doc = queries.get_provider(provider)

    # FIX: filter on enable directly in MongoDB instead of fetching every
    # dataset and discarding disabled ones in Python.
    query = {'provider_name': provider_doc["name"], "enable": True}

    projection = {"_id": False, "enable": True, "dataset_code": True,
                  "name": True, "slug": True}

    # ?is_meta=1 adds the metadata field to each document.
    is_meta = request.args.get('is_meta', type=int, default=0) == 1
    if is_meta:
        projection["metadata"] = True

    docs = list(queries.col_datasets().find(query, projection))

    return json_tools.json_response(docs)
def ajax_plot_series(slug):
    """Return the observations of one series as JSON, ready for plotting."""
    doc = queries.col_series().find_one({"slug": slug})
    if not doc:
        abort(404)

    # The owning dataset must exist and be enabled.
    parent = queries.col_datasets().find_one(
        {"provider_name": doc["provider_name"],
         "dataset_code": doc["dataset_code"]},
        {"enable": True})
    if not parent:
        abort(404)
    if parent["enable"] is False:
        abort(307)

    meta = {
        "provider_name": doc["provider_name"],
        "dataset_code": doc["dataset_code"],
        "name": doc["name"],
        "key": doc["key"],
        "slug": doc["slug"],
    }

    # One point per observation.
    datas = [
        {"period": period, "period_ts": str(period_ts), "value": value}
        for period, period_ts, value in datas_from_series(doc)
    ]

    return json_tools.json_response(datas, meta)
def ajax_plot_series(slug):
    """Serve the observations of the series *slug* as plot-ready JSON."""
    series = queries.col_series().find_one({"slug": slug})
    if not series:
        abort(404)

    dataset_filter = {
        "provider_name": series["provider_name"],
        "dataset_code": series["dataset_code"],
    }
    dataset = queries.col_datasets().find_one(dataset_filter, {"enable": True})
    if not dataset:
        abort(404)
    # Disabled dataset: temporary redirect status.
    if dataset["enable"] is False:
        abort(307)

    meta = dict(provider_name=series["provider_name"],
                dataset_code=series["dataset_code"],
                name=series["name"],
                key=series["key"],
                slug=series["slug"])

    points = []
    for period, period_ts, value in datas_from_series(series):
        point = {"period": period,
                 "period_ts": str(period_ts),
                 "value": value}
        points.append(point)

    return json_tools.json_response(points, meta)
def sitemap_datasets():
    """Yield a sitemap entry (endpoint, args, lastmod, freq, priority)
    for every enabled dataset."""
    cursor = queries.col_datasets().find(
        {"enable": True},
        {'_id': False, "download_last": True, "slug": True})
    for dataset in cursor:
        yield ('views.dataset-by-slug',
               {'slug': dataset['slug']},
               dataset['download_last'],
               "daily",
               0.9)
def atom_feed():
    """Build the Atom feed of datasets updated during the last 24 hours.

    Entries come from the stats-run collection (runs with at least one
    insert or update); each is joined with its dataset document for the
    title/summary/dates. Disabled or missing datasets are skipped.
    """
    from werkzeug.contrib.atom import AtomFeed

    feed = AtomFeed("Widukind",
                    feed_url=request.url,
                    subtitle="Updated datasets - Last 24 hours")

    # Runs that actually changed data...
    query = {
        "$or": [{"count_inserts": {"$gt": 0}},
                {"count_updates": {"$gt": 0}}]
    }
    # ...during the last 24 hours.
    startDate = arrow.utcnow().replace(days=-1).floor("second")
    query["created"] = {"$gte": startDate.datetime}

    limit = request.args.get('limit', default=0, type=int)

    cursor = queries.col_stats_run().find(query)
    if limit:
        cursor = cursor.limit(limit)
    cursor = cursor.sort("created", -1)
    rows = list(cursor)

    # Rebuild each dataset slug from provider name + dataset code.
    slugs = []
    for row in rows:
        slug = slugify("%s-%s" % (row["provider_name"], row["dataset_code"]),
                       word_boundary=False, save_order=True)
        slugs.append((row, slug))

    query_dataset = {"slug": {"$in": [s[1] for s in slugs]}}
    projection = {"metadata": False, "concepts": False, "codelists": False}
    datasets = {doc["slug"]: doc
                for doc in queries.col_datasets().find(query_dataset,
                                                       projection)}

    for row, slug in slugs:
        dataset = datasets.get(slug)
        # FIX: a stats entry may reference a dataset that no longer exists -
        # the original crashed on dataset["enable"] when get() returned None.
        if not dataset or not dataset["enable"]:
            continue
        url = url_for("views.explorer_d", dataset=slug, _external=True)
        feed.add(title="%s - %s" % (row["provider_name"],
                                    row["dataset_code"]),
                 summary=dataset["name"],
                 url=url,
                 id=slug,
                 updated=row["created"],
                 published=dataset["download_last"])

    return feed.get_response()
def ajax_tree_view(provider=None):
    """Render the category tree of a provider as a JS fragment.

    Builds a nested structure from the flat category documents (children
    hung under their parent), collects the referenced dataset codes, and
    resolves each enabled dataset to a slug + URL for the template.
    """
    provider = provider or request.args.get('provider')
    if not provider:
        abort(404, "provider is required")

    _provider = queries.get_provider(provider)
    provider_name = _provider["name"]

    query = {"provider_name": provider_name, "enable": True}
    cursor = queries.col_categories().find(query, {"_id": False})
    cursor = cursor.sort([("position", 1), ("category_code", 1)])
    categories = OrderedDict([(doc["category_code"], doc) for doc in cursor])

    ds_codes = []
    for_remove = []

    for cat in categories.values():
        parent_code = cat.get("parent")
        if parent_code:
            # FIX: the parent may be absent (e.g. disabled, so filtered out
            # by the enable=True query above). The original raised KeyError;
            # keep such orphans at the top level instead of crashing.
            parent = categories.get(parent_code)
            if parent is not None:
                parent.setdefault("children", []).append(cat)
                # Re-homed under its parent: drop from the top level later.
                for_remove.append(cat["category_code"])
        if cat.get("datasets"):
            for ds in cat["datasets"]:
                ds_codes.append(ds["dataset_code"])

    for code in for_remove:
        categories.pop(code)

    ds_query = {'provider_name': provider_name,
                "enable": True,
                "dataset_code": {"$in": list(set(ds_codes))}}
    ds_projection = {"_id": True, "dataset_code": True, "slug": True}
    cursor = queries.col_datasets().find(ds_query, ds_projection)

    dataset_codes = {}
    for doc in cursor:
        dataset_codes[doc['dataset_code']] = {
            "slug": doc['slug'],
            "url": url_for('.dataset-by-slug', slug=doc["slug"]),
        }

    context = dict(provider=_provider,
                   categories=categories,
                   dataset_codes=dataset_codes)
    data = render_template("datatree_ajax.html", **context)

    response = current_app.response_class(data,
                                          mimetype='application/javascript')
    response.status_code = 200
    # Honor conditional request headers (ETag/If-Modified-Since).
    response.make_conditional(request)
    return response
def ajax_tree_view(provider=None):
    """Serve a provider's category tree rendered as JavaScript.

    Flat category documents are nested (children attached to parents),
    the dataset codes they mention are gathered, and each enabled dataset
    is mapped to its slug and view URL for the template.
    """
    provider = provider or request.args.get('provider')
    if not provider:
        abort(404, "provider is required")

    _provider = queries.get_provider(provider)
    provider_name = _provider["name"]

    query = {"provider_name": provider_name, "enable": True}
    cursor = queries.col_categories().find(query, {"_id": False})
    cursor = cursor.sort([("position", 1), ("category_code", 1)])
    categories = OrderedDict([(doc["category_code"], doc) for doc in cursor])

    ds_codes = []
    for_remove = []

    for cat in categories.values():
        parent_code = cat.get("parent")
        if parent_code:
            parent = categories.get(parent_code)
            # FIX: a child may reference a parent that was filtered out
            # (enable=True query) - the original crashed with KeyError.
            # Such categories simply stay at the top level.
            if parent is not None:
                if "children" not in parent:
                    parent["children"] = []
                parent["children"].append(cat)
                for_remove.append(cat["category_code"])
        if cat.get("datasets"):
            for ds in cat["datasets"]:
                ds_codes.append(ds["dataset_code"])

    # Remove nested categories from the top level only after iterating.
    for code in for_remove:
        categories.pop(code)

    ds_query = {
        'provider_name': provider_name,
        "enable": True,
        "dataset_code": {"$in": list(set(ds_codes))},
    }
    ds_projection = {"_id": True, "dataset_code": True, "slug": True}

    dataset_codes = {}
    for doc in queries.col_datasets().find(ds_query, ds_projection):
        dataset_codes[doc['dataset_code']] = {
            "slug": doc['slug'],
            "url": url_for('.dataset-by-slug', slug=doc["slug"]),
        }

    context = dict(provider=_provider,
                   categories=categories,
                   dataset_codes=dataset_codes)
    data = render_template("datatree_ajax.html", **context)

    response = current_app.response_class(data,
                                          mimetype='application/javascript')
    response.status_code = 200
    response.make_conditional(request)
    return response
def sitemap_datasets():
    """Generate one sitemap tuple per enabled dataset."""
    wanted = {'_id': False, "download_last": True, "slug": True}
    for doc in queries.col_datasets().find({"enable": True}, wanted):
        entry = ('views.dataset-by-slug',
                 {'slug': doc['slug']},
                 doc['download_last'],
                 "daily",
                 0.9)
        yield entry
def stats_datasets():
    """Render dataset counts per provider, sorted by count (descending)."""
    pipeline = [
        {"$group": {"_id": "$provider_name", "count": {"$sum": 1}}},
        {"$sort": {"count": -1}},
    ]
    result = list(queries.col_datasets().aggregate(pipeline,
                                                   allowDiskUse=True))
    total = sum(row["count"] for row in result)
    return render_template("admin/stats-datasets.html",
                           result=result,
                           total=total)
def change_status_dataset(slug):
    """Toggle the enable flag of one dataset, then redirect back to the
    dataset list of its provider."""
    query = {"slug": slug}
    dataset = queries.col_datasets().find_one(query)
    if not dataset:
        abort(404)

    # Flip the flag.
    new_state = {"enable": not dataset["enable"]}
    queries.col_datasets().find_one_and_update(query, {"$set": new_state})

    provider = queries.col_providers().find_one(
        {"name": dataset["provider_name"]})
    return redirect(url_for(".datasets", slug=provider["slug"]))
def all_disable_datasets():
    """Render every disabled dataset, sorted by provider name."""
    # Exclude the heavy sub-documents from the result.
    excluded = {"dimension_list": False, "attribute_list": False,
                "concepts": False, "codelists": False}
    datasets = (queries.col_datasets()
                .find({"enable": False}, excluded)
                .sort("provider_name", 1))
    return render_template("admin/disable_datasets.html", datasets=datasets)
def change_status_provider(slug):
    """Toggle a provider's enable flag and propagate the new state to all
    of its datasets, then redirect to the providers list."""
    query = {"slug": slug}
    provider = queries.col_providers().find_one(query)
    if not provider:
        abort(404)

    query_update = {"enable": not provider["enable"]}

    queries.col_providers().find_one_and_update(query, {"$set": query_update})
    # Every dataset of the provider follows the provider's new state.
    queries.col_datasets().update_many({"provider_name": provider["name"]},
                                       {"$set": query_update})

    return redirect(url_for(".providers"))
def all_disable_datasets():
    """Show all datasets whose enable flag is off, ordered by provider."""
    # Heavy sub-documents are not needed by the template.
    projection = {
        "dimension_list": False,
        "attribute_list": False,
        "concepts": False,
        "codelists": False,
    }
    cursor = queries.col_datasets().find({"enable": False}, projection)
    cursor = cursor.sort("provider_name", 1)
    return render_template("admin/disable_datasets.html", datasets=cursor)
def all_datasets_for_provider_slug(slug):
    """Render every dataset of the provider identified by *slug*."""
    provider = queries.col_providers().find_one({"slug": slug})
    if not provider:
        abort(404)

    # Strip the heavy sub-documents from each dataset.
    projection = {"dimension_list": False, "attribute_list": False,
                  "concepts": False, "codelists": False}
    datasets = queries.col_datasets().find(
        {"provider_name": provider["name"]}, projection)

    series_counters = queries.series_counter(
        match={"provider_name": provider["name"]})

    return render_template("admin/datasets.html",
                           provider=provider,
                           series_counters=series_counters,
                           datasets=datasets)
def ajax_datasets_list(provider):
    """JSON list of the enabled datasets belonging to *provider*.

    TODO: covered query provider_name + enable + dataset_code + name + slug ???
    TODO: partial index filter on enable = True ?
    """
    provider_doc = queries.get_provider(provider)

    # FIX: let MongoDB filter on enable instead of fetching every dataset
    # and discarding the disabled ones client-side.
    query = {'provider_name': provider_doc["name"], "enable": True}

    projection = {
        "_id": False,
        "enable": True,
        "dataset_code": True,
        "name": True,
        "slug": True,
    }

    # ?is_meta=1 includes the metadata field in each document.
    if request.args.get('is_meta', type=int, default=0) == 1:
        projection["metadata"] = True

    docs = list(queries.col_datasets().find(query, projection))

    return json_tools.json_response(docs)
def stats_datasets():
    """Show, per provider, how many datasets are stored."""
    stage_group = {"$group": {"_id": "$provider_name",
                              "count": {"$sum": 1}}}
    stage_sort = {"$sort": {"count": -1}}
    result = list(queries.col_datasets().aggregate([stage_group, stage_sort],
                                                   allowDiskUse=True))

    total = 0
    for row in result:
        total += row["count"]

    return render_template("admin/stats-datasets.html",
                           result=result,
                           total=total)
def all_datasets_for_provider_slug(slug):
    """List all datasets (enabled or not) of one provider in the admin UI."""
    provider = queries.col_providers().find_one({"slug": slug})
    if provider is None:
        abort(404)

    provider_name = provider["name"]

    # Large sub-documents excluded from the listing.
    without_heavy_fields = {
        "dimension_list": False,
        "attribute_list": False,
        "concepts": False,
        "codelists": False,
    }
    datasets = queries.col_datasets().find({"provider_name": provider_name},
                                           without_heavy_fields)
    series_counters = queries.series_counter(
        match={"provider_name": provider_name})

    return render_template("admin/datasets.html",
                           provider=provider,
                           series_counters=series_counters,
                           datasets=datasets)
def explorer_view(provider=None, dataset=None, series=None):
    """Display the explorer page with an initial provider/dataset selection.

    http://127.0.0.1:8081/views/explorer/dataset/insee-cna-2005-ere-a88
    http://127.0.0.1:8081/views/explorer/insee
    http://127.0.0.1:8081/views/explorer/insee/insee-cna-2005-ere-a88
    http://127.0.0.1:8081/views/explorer
    http://127.0.0.1:8081/views
    """
    if not dataset and not provider:
        # No selection in the URL: use the configured defaults.
        provider = current_app.config.get('DEFAULT_PROVIDER', None)
        dataset = current_app.config.get('DEFAULT_DATASET', None)

    if dataset:
        doc = queries.get_dataset(dataset)
        # The provider slug is the lower-cased provider name.
        provider = doc["provider_name"].lower()
    elif provider:
        provider_doc = queries.get_provider(provider, {
            "slug": True,
            "name": True,
            "enable": True,
        })
        dataset_doc = queries.col_datasets().find_one(
            {"provider_name": provider_doc["name"], "enable": True},
            {"slug": True},
        )
        # FIX: providers without any enabled dataset made the original
        # crash (find_one returned None); answer 404 instead.
        if not dataset_doc:
            abort(404)
        dataset = dataset_doc["slug"]

    ctx = {
        "selectedProvider": provider,
        "selectedDataset": dataset,
    }

    return render_template("explorer.html", **ctx)
def ajax_explorer_datas():
    """Ajax endpoint: return series for the explorer grid as JSON.

    Filters come from the query string: ``provider`` and/or ``dataset``
    (slugs) plus a full-text ``search``. Each returned series document is
    enriched with display URLs and a start/end date; the result is capped
    at ``limit`` rows (1000 max).
    """
    limit = request.args.get('limit', default=100, type=int)
    if limit > 1000:
        limit = 1000
    # ~7,669,115 bytes for 1000 series
    # Strip the heavy sub-documents from the answer.
    projection = {
        "_id": False,
        "dimensions": False,
        "attributes": False,
        "values.revisions": False,
        "notes": False,
        "tags": False
    }

    query = OrderedDict()

    provider_slug = request.args.get('provider')
    dataset_slug = request.args.get('dataset')
    #series_slug = request.args.get('series')
    search = request.args.get('search')

    # For EUROSTAT the cursor.count() below is skipped (count forced to 0).
    is_eurostat = False

    if dataset_slug:
        dataset = queries.get_dataset(dataset_slug)
        query["provider_name"] = dataset["provider_name"]
        #query["dataset_code"] = dataset["dataset_code"]
        if dataset["provider_name"] == "EUROSTAT":
            is_eurostat = True
    elif provider_slug:
        provider = queries.get_provider(provider_slug)
        query["provider_name"] = provider["name"]
        if provider["name"] == "EUROSTAT":
            is_eurostat = True

    if search:
        # MongoDB full-text search; the score is projected for sorting below.
        query["$text"] = {"$search": search.strip()}
        projection['score'] = {'$meta': 'textScore'}

    disabled_datasets = []

    if dataset_slug:
        query["dataset_code"] = dataset["dataset_code"]
    else:
        # No single dataset selected: collect the codes of disabled datasets
        # so their series can be filtered out of the rows below.
        ds_enabled_query = {"enable": False}
        if "provider_name" in query:
            ds_enabled_query["provider_name"] = query["provider_name"]
        disabled_datasets = [
            doc["dataset_code"]
            for doc in queries.col_datasets().find(
                ds_enabled_query, {"dataset_code": True})
        ]

    # Expand extra dimension/attribute filters from the query string.
    query = complex_queries_series(query)

    cursor = queries.col_series().find(dict(query), projection)

    if search:
        cursor = cursor.sort([('score', {'$meta': 'textScore'})])

    #else:
    #    query = {"slug": series_slug}
    #    cursor = queries.col_series().find(dict(query), projection)

    if limit:
        cursor = cursor.limit(limit)

    if is_eurostat:
        count = 0
    else:
        count = cursor.count()

    series_list = [doc for doc in cursor]

    rows = []

    for s in series_list:

        # Drop series belonging to a disabled dataset.
        if disabled_datasets and s["dataset_code"] in disabled_datasets:
            continue

        if not "version" in s:
            s["version"] = 0

        # First/last stored values give the series' period range.
        s['start_date'] = s["values"][0]["period"]
        s['end_date'] = s["values"][-1]["period"]

        # Keep only period/value pairs in the payload.
        values = [{
            "period": v["period"],
            "value": v["value"]
        } for v in s['values']]
        del s["values"]
        s["values"] = values

        s['view'] = url_for('.series-by-slug', slug=s['slug'], modal=1)

        # Rebuild the dataset slug from provider name + dataset code.
        dataset_slug = slugify("%s-%s" % (s["provider_name"],
                                          s["dataset_code"]),
                               word_boundary=False,
                               save_order=True)
        s['view_dataset'] = url_for('.dataset-by-slug',
                                    slug=dataset_slug,
                                    modal=1)
        #s["view_explorer"] = url_for('.explorer_s', series=s['slug'], _external=True)
        s['dataset_slug'] = dataset_slug
        s['export_csv'] = url_for('.export-series-csv', slug=s['slug'])
        s['url_series_plot'] = url_for('.ajax_series_plot', slug=s['slug'])
        s['url_cart_add'] = url_for('.ajax-cart-add', slug=s['slug'])
        #TODO: s['url_dataset'] = url_for('.dataset', id=s['_id'])

        # Human-readable frequency label when known.
        s['frequency_txt'] = s['frequency']
        if s['frequency'] in constants.FREQUENCIES_DICT:
            s['frequency_txt'] = constants.FREQUENCIES_DICT[s['frequency']]

        rows.append(s)

    return json_tools.json_response(rows, {"total": count})
def series_with_slug(slug, version):
    """Render one series, optionally at a specific archived version.

    In all cases:
    - load the latest version
    - load every revision prior to the latest version
    """
    is_modal = request.args.get('modal', default=0, type=int)
    is_debug = request.args.get('debug')
    #_version = request.args.get('version', default="latest")

    is_latest = True

    query = {"slug": slug}

    '''Load always latest series from col series'''
    series_latest = queries.col_series().find_one(query)
    if not series_latest:
        abort(404)

    # A specific, non-current version is loaded from the archives.
    if version >= 0 and version != series_latest['version']:
        query['version'] = version
        store = queries.col_series_archives().find_one(query)
        if not store:
            abort(404)
        series = series_archives_load(store)
        is_latest = False
    else:
        series = series_latest

    provider = queries.col_providers().find_one(
        {"name": series_latest['provider_name']}, {"metadata": False})
    if not provider:
        abort(404)
    # Disabled provider/dataset: temporary redirect status.
    if provider["enable"] is False:
        abort(307)

    dataset = queries.col_datasets().find_one(
        {
            'provider_name': series_latest['provider_name'],
            "dataset_code": series_latest['dataset_code']
        }, {"metadata": False})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)

    if is_debug:
        '''debug mode'''
        # ?debug returns the pretty-printed raw documents as JSON.
        result_provider = render_template_string("{{ provider|pprint|safe }}",
                                                 provider=provider)
        result_dataset = render_template_string("{{ dataset|pprint|safe }}",
                                                dataset=dataset)
        result_series = render_template_string("{{ series|pprint|safe }}",
                                               series=series)
        return current_app.jsonify(
            dict(provider=result_provider,
                 dataset=result_dataset,
                 series=result_series))

    '''Load revisions < current version'''
    revisions = []
    if "version" in series:
        query_revisions = {"slug": slug, "version": {"$lt": series["version"]}}
        count_values = len(series['values'])
        for store in queries.col_series_archives().find(query_revisions).sort(
                'version', DESCENDING):
            series_rev = series_archives_load(store)
            values = series_rev['values']
            # Older revisions may hold fewer observations: left-pad with
            # None so every revision lines up with the current values.
            empty_element = count_values - len(values)
            values.reverse()
            for i in range(empty_element):
                values.insert(0, None)
            revisions.append({
                "last_update_ds": series_rev['last_update_ds'],
                "version": series_rev['version'],
                "values": values,
                "name": series_rev["name"],
                "url": url_for('.series-by-slug-version',
                               slug=slug,
                               version=series_rev['version'])
            })
    else:
        series["version"] = 0

    if not "last_update_ds" in series:
        # Fall back to the dataset's last_update when the series has none.
        series["last_update_ds"] = dataset["last_update"]
        series["last_update_widu"] = dataset["last_update"]

    #view_explorer = url_for('.explorer_s', series=slug, _external=True)
    url_provider = url_for('.explorer_p', provider=provider["slug"])
    url_dataset = url_for('.explorer_d', dataset=dataset["slug"])
    url_dataset_direct = url_for('.dataset-by-slug',
                                 slug=dataset["slug"],
                                 _external=True)
    url_series = url_for('.series-by-slug-version',
                         slug=slug,
                         version=series["version"],
                         _external=True)
    url_series_latest = url_for('.series-by-slug-version',
                                slug=slug,
                                version=series_latest["version"])
    url_series_plot = url_for('.ajax_series_plot', slug=slug)
    url_export_csv = url_for('.export-series-csv', slug=slug)

    # Dot-separated dimension values in the dataset's key order.
    dimension_filter = ".".join(
        [series["dimensions"][key] for key in dataset["dimension_keys"]])

    result = render_template(
        "series-unit-modal.html",
        url_provider=url_provider,
        url_dataset=url_dataset,
        url_dataset_direct=url_dataset_direct,
        url_series=url_series,
        url_series_latest=url_series_latest,
        url_series_plot=url_series_plot,
        url_export_csv=url_export_csv,
        series=series,
        is_modal=is_modal,
        provider=provider,
        dataset=dataset,
        is_latest=is_latest,
        revisions=revisions,
        #max_version=max_version,
        #view_explorer=view_explorer,
        dimension_filter=dimension_filter.upper(),
        #is_reverse=is_reverse,
        #obs_attributes_keys=list(set(obs_attributes_keys)),
        #obs_attributes_values=list(set(obs_attributes_values)),
        #revision_dates=list(set(revision_dates)),
        #max_revisions=max_revisions
    )
    return result
def atom_feed():
    """Atom feed of the datasets updated in the last 24 hours.

    Feed entries come from stats-run documents with at least one insert
    or update, joined with the dataset documents for names and dates.
    Missing or disabled datasets are skipped.
    """
    from werkzeug.contrib.atom import AtomFeed

    feed = AtomFeed(
        "DB.nomics",
        feed_url=request.url,
        subtitle="Updated datasets - Last 24 hours")

    # Runs that actually inserted or updated something...
    query = {
        "$or": [{"count_inserts": {"$gt": 0}},
                {"count_updates": {"$gt": 0}}]
    }
    # ...created in the last 24 hours.
    startDate = arrow.utcnow().replace(days=-1).floor("second")
    query["created"] = {"$gte": startDate.datetime}

    limit = request.args.get('limit', default=0, type=int)

    cursor = queries.col_stats_run().find(query)
    if limit:
        cursor = cursor.limit(limit)
    cursor = cursor.sort("created", -1)
    rows = list(cursor)

    # Rebuild each dataset slug from provider name + dataset code.
    slugs = []
    for row in rows:
        slug = slugify("%s-%s" % (row["provider_name"], row["dataset_code"]),
                       word_boundary=False, save_order=True)
        slugs.append((row, slug))

    query_dataset = {"slug": {"$in": [s[1] for s in slugs]}}
    projection = {"metadata": False, "concepts": False, "codelists": False}
    datasets = {
        doc["slug"]: doc
        for doc in queries.col_datasets().find(query_dataset, projection)
    }

    for row, slug in slugs:
        dataset = datasets.get(slug)
        # FIX: guard against stats entries whose dataset no longer exists -
        # the original crashed on dataset["enable"] with dataset=None.
        if not dataset or not dataset["enable"]:
            continue
        url = url_for("views.explorer_d", dataset=slug, _external=True)
        feed.add(
            title="%s - %s" % (row["provider_name"], row["dataset_code"]),
            summary=dataset["name"],
            url=url,
            id=slug,
            updated=row["created"],
            published=dataset["download_last"])

    return feed.get_response()
def ajax_explorer_datas():
    """Ajax endpoint feeding the explorer grid with series as JSON.

    Query-string filters: ``provider`` and/or ``dataset`` (slugs) and a
    full-text ``search``. Series of disabled datasets are dropped, the
    remaining documents are enriched with URLs for the UI, and at most
    ``limit`` rows (1000 max) are returned.
    """
    limit = request.args.get('limit', default=100, type=int)
    if limit > 1000:
        limit = 1000
    # ~7,669,115 bytes for 1000 series
    # Drop the heavy sub-documents from the payload.
    projection = {
        "_id": False,
        "dimensions": False,
        "attributes": False,
        "values.revisions": False,
        "notes": False,
        "tags": False
    }

    query = OrderedDict()

    provider_slug = request.args.get('provider')
    dataset_slug = request.args.get('dataset')
    #series_slug = request.args.get('series')
    search = request.args.get('search')

    # For EUROSTAT the cursor.count() below is skipped (count stays 0).
    is_eurostat = False

    if dataset_slug:
        dataset = queries.get_dataset(dataset_slug)
        query["provider_name"] = dataset["provider_name"]
        #query["dataset_code"] = dataset["dataset_code"]
        if dataset["provider_name"] == "EUROSTAT":
            is_eurostat = True
    elif provider_slug:
        provider = queries.get_provider(provider_slug)
        query["provider_name"] = provider["name"]
        if provider["name"] == "EUROSTAT":
            is_eurostat = True

    if search:
        # Full-text search: project the score so it can be sorted on below.
        query["$text"] = {"$search": search.strip()}
        projection['score'] = {'$meta': 'textScore'}

    disabled_datasets = []

    if dataset_slug:
        query["dataset_code"] = dataset["dataset_code"]
    else:
        # No single dataset selected: remember the codes of disabled
        # datasets so their series can be filtered out further down.
        ds_enabled_query = {"enable": False}
        if "provider_name" in query:
            ds_enabled_query["provider_name"] = query["provider_name"]
        disabled_datasets = [doc["dataset_code"]
                             for doc in queries.col_datasets().find(
                                 ds_enabled_query, {"dataset_code": True})]

    # Expand additional dimension/attribute filters from the query string.
    query = complex_queries_series(query)

    cursor = queries.col_series().find(dict(query), projection)

    if search:
        cursor = cursor.sort([('score', {'$meta': 'textScore'})])

    #else:
    #    query = {"slug": series_slug}
    #    cursor = queries.col_series().find(dict(query), projection)

    if limit:
        cursor = cursor.limit(limit)

    if is_eurostat:
        count = 0
    else:
        count = cursor.count()

    series_list = [doc for doc in cursor]

    rows = []

    for s in series_list:

        # Skip series whose dataset is disabled.
        if disabled_datasets and s["dataset_code"] in disabled_datasets:
            continue

        if not "version" in s:
            s["version"] = 0

        # The first and last stored values give the period range.
        s['start_date'] = s["values"][0]["period"]
        s['end_date'] = s["values"][-1]["period"]

        # Keep only period/value pairs in the output.
        values = [{"period": v["period"],
                   "value": v["value"]} for v in s['values']]
        del s["values"]
        s["values"] = values

        s['view'] = url_for('.series-by-slug', slug=s['slug'], modal=1)

        # Rebuild the dataset slug from provider name + dataset code.
        dataset_slug = slugify("%s-%s" % (s["provider_name"],
                                          s["dataset_code"]),
                               word_boundary=False,
                               save_order=True)
        s['view_dataset'] = url_for('.dataset-by-slug',
                                    slug=dataset_slug,
                                    modal=1)
        #s["view_explorer"] = url_for('.explorer_s', series=s['slug'], _external=True)
        s['dataset_slug'] = dataset_slug
        s['export_csv'] = url_for('.export-series-csv', slug=s['slug'])
        s['url_series_plot'] = url_for('.ajax_series_plot', slug=s['slug'])
        s['url_cart_add'] = url_for('.ajax-cart-add', slug=s['slug'])
        #TODO: s['url_dataset'] = url_for('.dataset', id=s['_id'])

        # Human-readable frequency label when the code is known.
        s['frequency_txt'] = s['frequency']
        if s['frequency'] in constants.FREQUENCIES_DICT:
            s['frequency_txt'] = constants.FREQUENCIES_DICT[s['frequency']]

        rows.append(s)

    return json_tools.json_response(rows, {"total": count})
def series_with_slug(slug, version):
    """Render the detail view of one series at a given version.

    In all cases:
    - load the latest version
    - load every revision prior to the latest version
    """
    is_modal = request.args.get('modal', default=0, type=int)
    is_debug = request.args.get('debug')
    #_version = request.args.get('version', default="latest")

    is_latest = True

    query = {"slug": slug}

    '''Load always latest series from col series'''
    series_latest = queries.col_series().find_one(query)
    if not series_latest:
        abort(404)

    # A requested version that is not the current one comes from the archives.
    if version >= 0 and version != series_latest['version']:
        query['version'] = version
        store = queries.col_series_archives().find_one(query)
        if not store:
            abort(404)
        series = series_archives_load(store)
        is_latest = False
    else:
        series = series_latest

    provider = queries.col_providers().find_one(
        {"name": series_latest['provider_name']},
        {"metadata": False})
    if not provider:
        abort(404)
    # Disabled provider/dataset: answer a temporary redirect status.
    if provider["enable"] is False:
        abort(307)

    dataset = queries.col_datasets().find_one(
        {'provider_name': series_latest['provider_name'],
         "dataset_code": series_latest['dataset_code']},
        {"metadata": False})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)

    if is_debug:
        '''debug mode'''
        # ?debug returns the pretty-printed raw documents as JSON.
        result_provider = render_template_string("{{ provider|pprint|safe }}",
                                                 provider=provider)
        result_dataset = render_template_string("{{ dataset|pprint|safe }}",
                                                dataset=dataset)
        result_series = render_template_string("{{ series|pprint|safe }}",
                                               series=series)
        return current_app.jsonify(dict(provider=result_provider,
                                        dataset=result_dataset,
                                        series=result_series))

    '''Load revisions < current version'''
    revisions = []
    if "version" in series:
        query_revisions = {"slug": slug, "version": {"$lt": series["version"]}}
        count_values = len(series['values'])
        for store in queries.col_series_archives().find(
                query_revisions).sort('version', DESCENDING):
            series_rev = series_archives_load(store)
            values = series_rev['values']
            # Older revisions may carry fewer observations: left-pad with
            # None so each revision lines up with the current values.
            empty_element = count_values - len(values)
            values.reverse()
            for i in range(empty_element):
                values.insert(0, None)
            revisions.append({"last_update_ds": series_rev['last_update_ds'],
                              "version": series_rev['version'],
                              "values": values,
                              "name": series_rev["name"],
                              "url": url_for('.series-by-slug-version',
                                             slug=slug,
                                             version=series_rev['version'])})
    else:
        series["version"] = 0

    if not "last_update_ds" in series:
        # Use the dataset's last_update when the series carries none.
        series["last_update_ds"] = dataset["last_update"]
        series["last_update_widu"] = dataset["last_update"]

    #view_explorer = url_for('.explorer_s', series=slug, _external=True)
    url_provider = url_for('.explorer_p', provider=provider["slug"])
    url_dataset = url_for('.explorer_d', dataset=dataset["slug"])
    url_dataset_direct = url_for('.dataset-by-slug',
                                 slug=dataset["slug"],
                                 _external=True)
    url_series = url_for('.series-by-slug-version',
                         slug=slug,
                         version=series["version"],
                         _external=True)
    url_series_latest = url_for('.series-by-slug-version',
                                slug=slug,
                                version=series_latest["version"])
    url_series_plot = url_for('.ajax_series_plot', slug=slug)
    url_export_csv = url_for('.export-series-csv', slug=slug)

    # Dot-separated dimension values, in the dataset's key order.
    dimension_filter = ".".join([series["dimensions"][key]
                                 for key in dataset["dimension_keys"]])

    result = render_template(
        "series-unit-modal.html",
        url_provider=url_provider,
        url_dataset=url_dataset,
        url_dataset_direct=url_dataset_direct,
        url_series=url_series,
        url_series_latest=url_series_latest,
        url_series_plot=url_series_plot,
        url_export_csv=url_export_csv,
        series=series,
        is_modal=is_modal,
        provider=provider,
        dataset=dataset,
        is_latest=is_latest,
        revisions=revisions,
        #max_version=max_version,
        #view_explorer=view_explorer,
        dimension_filter=dimension_filter.upper(),
        #is_reverse=is_reverse,
        #obs_attributes_keys=list(set(obs_attributes_keys)),
        #obs_attributes_values=list(set(obs_attributes_values)),
        #revision_dates=list(set(revision_dates)),
        #max_revisions=max_revisions
    )
    return result