def ajax_plot_series(slug):
    """Serve the observations of one series as a JSON plot payload.

    Aborts with 404 when the series or its dataset is unknown, and with
    307 when the dataset exists but is disabled.
    """
    doc = queries.col_series().find_one({"slug": slug})
    if not doc:
        abort(404)

    # Only the "enable" flag of the dataset is needed here.
    dataset = queries.col_datasets().find_one(
        {"provider_name": doc["provider_name"],
         "dataset_code": doc["dataset_code"]},
        {"enable": True})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)

    meta = {
        "provider_name": doc["provider_name"],
        "dataset_code": doc["dataset_code"],
        "name": doc["name"],
        "key": doc["key"],
        "slug": doc["slug"],
    }
    points = [
        {"period": period, "period_ts": str(period_ts), "value": value}
        for period, period_ts, value in datas_from_series(doc)
    ]
    return json_tools.json_response(points, meta)
def ajax_plot_series(slug):
    """Return the data points of a single series for client-side plotting.

    404 when the series or its dataset cannot be found; 307 when the
    dataset is disabled.
    """
    series = queries.col_series().find_one({"slug": slug})
    if not series:
        abort(404)

    ds_filter = {
        "provider_name": series["provider_name"],
        "dataset_code": series["dataset_code"],
    }
    # Project only the flag we check below.
    dataset = queries.col_datasets().find_one(ds_filter, {"enable": True})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)

    # Metadata echoed back alongside the data points.
    meta = {
        key: series[key]
        for key in ("provider_name", "dataset_code", "name", "key", "slug")
    }
    rows = []
    for period, period_ts, value in datas_from_series(series):
        rows.append({"period": period,
                     "period_ts": str(period_ts),
                     "value": value})
    return json_tools.json_response(rows, meta)
def ajax_cart_view():
    """Render the cart page, or return the cart's series rows as JSON.

    Without a ``json`` query parameter the HTML template is returned;
    otherwise every series whose slug is in the session cart is loaded
    (minus heavy sub-documents) and decorated with view/export URLs.
    """
    if not request.args.get('json'):  # or request.is_xhr
        return render_template("series-cart.html")

    rows = None
    cart = session.get("cart")
    if cart:
        # Strip heavy sub-documents from the listing payload.
        projection = {
            "dimensions": False,
            "attributes": False,
            "release_dates": False,
            "revisions": False,
            "values": False,
        }
        cursor = queries.col_series().find({"slug": {"$in": list(cart)}},
                                           projection=projection)
        rows = list(cursor)
        for entry in rows:
            entry.setdefault("version", 0)
            entry['view'] = url_for('.series-by-slug', slug=entry['slug'], modal=1)
            ds_slug = slugify("%s-%s" % (entry["provider_name"], entry["dataset_code"]),
                              word_boundary=False, save_order=True)
            entry['view_dataset'] = url_for('.dataset-by-slug', slug=ds_slug, modal=1)
            entry['dataset_slug'] = ds_slug
            entry['export_csv'] = url_for('.export-series-csv', slug=entry['slug'])
            entry['url_series_plot'] = url_for('.ajax_series_plot', slug=entry['slug'])
            entry['url_cart_remove'] = url_for('.ajax-cart-remove', slug=entry['slug'])
            # Human-readable frequency label when known.
            entry['frequency_txt'] = constants.FREQUENCIES_DICT.get(
                entry['frequency'], entry['frequency'])
    return current_app.jsonify(rows)
def stats_series():
    """Render the admin page with per-provider series counts and a total."""
    providers = [doc["name"]
                 for doc in queries.col_providers().find({}, {"name": True})]
    result = []
    total = 0
    for name in providers:
        # One count query per provider (an aggregation would also work,
        # but this keeps memory usage predictable).
        count = queries.col_series().count({"provider_name": name})
        result.append({"_id": name, "count": count})
        total += count
    return render_template("admin/stats-series.html", result=result, total=total)
def ajax_cart_view():
    """Serve the series cart: HTML template by default, JSON rows on ?json=1.

    Each cart entry is enriched with navigation and export URLs plus a
    readable frequency label.
    """
    is_ajax = request.args.get('json')  # or request.is_xhr
    if not is_ajax:
        return render_template("series-cart.html")

    docs = None
    cart = session.get("cart", None)
    if cart:
        slugs = [c for c in cart]
        # Drop large sub-documents; the cart listing needs metadata only.
        projection = {
            "dimensions": False,
            "attributes": False,
            "release_dates": False,
            "revisions": False,
            "values": False,
        }
        docs = list(queries.col_series().find({"slug": {"$in": slugs}},
                                              projection=projection))
        for s in docs:
            if "version" not in s:
                s["version"] = 0
            s['view'] = url_for('.series-by-slug', slug=s['slug'], modal=1)
            dataset_slug = slugify(
                "%s-%s" % (s["provider_name"], s["dataset_code"]),
                word_boundary=False, save_order=True)
            s['view_dataset'] = url_for('.dataset-by-slug',
                                        slug=dataset_slug, modal=1)
            s['dataset_slug'] = dataset_slug
            s['export_csv'] = url_for('.export-series-csv', slug=s['slug'])
            s['url_series_plot'] = url_for('.ajax_series_plot', slug=s['slug'])
            s['url_cart_remove'] = url_for('.ajax-cart-remove', slug=s['slug'])
            s['frequency_txt'] = s['frequency']
            if s['frequency'] in constants.FREQUENCIES_DICT:
                s['frequency_txt'] = constants.FREQUENCIES_DICT[s['frequency']]
    return current_app.jsonify(docs)
def ajax_dataset_frequencies(dataset):
    """Return the frequencies available for a dataset as a JSON list.

    Prefers the frequencies cached in the dataset's metadata; falls back
    to a distinct() over the series collection.
    """
    doc = queries.get_dataset(dataset,
                              {"_id": False, "lock": False, "tags": False})
    frequencies = None
    if "metadata" in doc:
        frequencies = doc["metadata"].get("frequencies")
    if not frequencies:
        # No cached list: derive it from the stored series.
        frequencies = queries.col_series().distinct(
            "frequency",
            filter={"provider_name": doc["provider_name"],
                    "dataset_code": doc["dataset_code"]})

    freqs = [
        {"value": freq,
         "text": constants.FREQUENCIES_DICT.get(freq, freq)}
        for freq in frequencies
    ]
    return json_tools.json_response(freqs)
def stats_series():
    """Admin view: number of series per provider, plus the grand total."""
    cursor = queries.col_providers().find({}, {"name": True})
    result = []
    for doc in cursor:
        provider = doc["name"]
        result.append({
            "_id": provider,
            "count": queries.col_series().count({"provider_name": provider}),
        })
    # Grand total across all providers.
    total = sum(r["count"] for r in result)
    return render_template("admin/stats-series.html",
                           result=result, total=total)
def ajax_dataset_frequencies(dataset):
    """JSON list of {value, text} frequency entries for one dataset.

    Uses dataset.metadata.frequencies when present, otherwise queries
    the distinct frequencies of the dataset's series.
    """
    projection = {"_id": False, "lock": False, "tags": False}
    doc = queries.get_dataset(dataset, projection)

    if "metadata" in doc and doc["metadata"].get("frequencies"):
        frequencies = doc["metadata"].get("frequencies")
    else:
        query = {
            "provider_name": doc["provider_name"],
            "dataset_code": doc["dataset_code"],
        }
        frequencies = queries.col_series().distinct("frequency", filter=query)

    freqs = []
    for freq in frequencies:
        # Translate the frequency code to a label when one is known.
        label = constants.FREQUENCIES_DICT.get(freq, freq)
        freqs.append({"value": freq, "text": label})
    return json_tools.json_response(freqs)
def export_series_csv(slug=None):
    """Export one or several series as CSV.

    The slug comes from the URL or the ``slug`` query parameter; several
    series may be requested by joining their slugs with ``+``, e.g.::

        /views/export/series/insee-...-001759971+insee-...-001762151

    Aborts 404 when no slug is supplied.
    """
    if not slug:
        slug = request.args.get('slug')
    if not slug:
        abort(404, "slug is required parameter")

    if "+" in slug:
        query = {'slug': {"$in": slug.split("+")}}
    else:
        query = {'slug': slug}

    cursor = queries.col_series().find(query, {"tags": False, "notes": False})

    buf = StringIO()
    writer = csv.writer(buf, quoting=csv.QUOTE_NONNUMERIC)
    headers = ["provider", "dataset_code", "key", "slug",
               "name", "frequency", "period", "value"]

    rows = []
    for doc in cursor:
        if "version" not in doc:
            doc["version"] = 0
        # Columns shared by every observation of this series.
        prefix = [doc["provider_name"], doc["dataset_code"], doc["key"],
                  doc["slug"], doc["name"], doc["frequency"]]
        for val in doc['values']:
            rows.append(prefix + [val["period"], val["value"]])

    writer.writerow(headers)
    writer.writerows(rows)
    buf.seek(0)
    return send_file_csv(buf, mimetype='text/csv')
def ajax_explorer_datas():
    """Return series rows for the explorer grid as JSON.

    Filters come from the query string: ``provider`` / ``dataset``
    (slugs), ``search`` (Mongo full-text search) and ``limit``
    (defaults to 100, capped at 1000).
    """
    limit = request.args.get('limit', default=100, type=int)
    if limit > 1000:
        limit = 1000
    # ~7,669,115 bytes of payload for 1000 series — hence the cap above.
    projection = {
        "_id": False,
        "dimensions": False,
        "attributes": False,
        "values.revisions": False,
        "notes": False,
        "tags": False
    }
    # NOTE(review): OrderedDict presumably matters to complex_queries_series()
    # below (key insertion order) — confirm before simplifying to a dict.
    query = OrderedDict()
    provider_slug = request.args.get('provider')
    dataset_slug = request.args.get('dataset')
    #series_slug = request.args.get('series')
    search = request.args.get('search')
    is_eurostat = False
    if dataset_slug:
        dataset = queries.get_dataset(dataset_slug)
        query["provider_name"] = dataset["provider_name"]
        #query["dataset_code"] = dataset["dataset_code"]
        if dataset["provider_name"] == "EUROSTAT":
            is_eurostat = True
    elif provider_slug:
        provider = queries.get_provider(provider_slug)
        query["provider_name"] = provider["name"]
        if provider["name"] == "EUROSTAT":
            is_eurostat = True
    if search:
        # Full-text search; the textScore is projected so we can sort on it.
        query["$text"] = {"$search": search.strip()}
        projection['score'] = {'$meta': 'textScore'}
    disabled_datasets = []
    if dataset_slug:
        # dataset_code is added here (after $text) rather than above —
        # NOTE(review): presumably intentional for the query key order.
        query["dataset_code"] = dataset["dataset_code"]
    else:
        # No dataset filter: collect codes of disabled datasets so their
        # series can be skipped in the result loop below.
        ds_enabled_query = {"enable": False}
        if "provider_name" in query:
            ds_enabled_query["provider_name"] = query["provider_name"]
        disabled_datasets = [
            doc["dataset_code"] for doc in queries.col_datasets().find(
                ds_enabled_query, {"dataset_code": True})
        ]
    query = complex_queries_series(query)
    cursor = queries.col_series().find(dict(query), projection)
    if search:
        cursor = cursor.sort([('score', {'$meta': 'textScore'})])
    #else:
    #    query = {"slug": series_slug}
    #    cursor = queries.col_series().find(dict(query), projection)
    if limit:
        cursor = cursor.limit(limit)
    if is_eurostat:
        # NOTE(review): counting is presumably skipped for EUROSTAT because
        # it is too expensive on that provider's volume — total reported as 0.
        count = 0
    else:
        count = cursor.count()
    series_list = [doc for doc in cursor]
    rows = []
    for s in series_list:
        # Skip series whose dataset is disabled (only populated when no
        # explicit dataset filter was given).
        if disabled_datasets and s["dataset_code"] in disabled_datasets:
            continue
        if not "version" in s:
            s["version"] = 0
        s['start_date'] = s["values"][0]["period"]
        s['end_date'] = s["values"][-1]["period"]
        # Keep only period/value per observation (drops e.g. revisions).
        values = [{
            "period": v["period"],
            "value": v["value"]
        } for v in s['values']]
        del s["values"]
        s["values"] = values
        s['view'] = url_for('.series-by-slug', slug=s['slug'], modal=1)
        dataset_slug = slugify("%s-%s" % (s["provider_name"], s["dataset_code"]),
                               word_boundary=False,
                               save_order=True)
        s['view_dataset'] = url_for('.dataset-by-slug',
                                    slug=dataset_slug,
                                    modal=1)
        #s["view_explorer"] = url_for('.explorer_s', series=s['slug'], _external=True)
        s['dataset_slug'] = dataset_slug
        s['export_csv'] = url_for('.export-series-csv', slug=s['slug'])
        s['url_series_plot'] = url_for('.ajax_series_plot', slug=s['slug'])
        s['url_cart_add'] = url_for('.ajax-cart-add', slug=s['slug'])
        #TODO: s['url_dataset'] = url_for('.dataset', id=s['_id'])
        # Human-readable frequency label when the code is known.
        s['frequency_txt'] = s['frequency']
        if s['frequency'] in constants.FREQUENCIES_DICT:
            s['frequency_txt'] = constants.FREQUENCIES_DICT[s['frequency']]
        rows.append(s)
    return json_tools.json_response(rows, {"total": count})
def series_with_slug(slug, version):
    """Render the detail (modal) view of one series at a given version.

    In every case:
    - load the latest version of the series
    - load all revisions older than the displayed version

    Aborts 404 when the series, requested version, provider or dataset
    is missing; 307 when the provider or dataset is disabled.
    """
    is_modal = request.args.get('modal', default=0, type=int)
    is_debug = request.args.get('debug')
    #_version = request.args.get('version', default="latest")
    is_latest = True
    query = {"slug": slug}
    '''Load always latest series from col series'''
    series_latest = queries.col_series().find_one(query)
    if not series_latest:
        abort(404)
    if version >= 0 and version != series_latest['version']:
        # An older version was requested: load it from the archives.
        query['version'] = version
        store = queries.col_series_archives().find_one(query)
        if not store:
            abort(404)
        series = series_archives_load(store)
        is_latest = False
    else:
        series = series_latest
    provider = queries.col_providers().find_one(
        {"name": series_latest['provider_name']}, {"metadata": False})
    if not provider:
        abort(404)
    if provider["enable"] is False:
        abort(307)
    dataset = queries.col_datasets().find_one(
        {
            'provider_name': series_latest['provider_name'],
            "dataset_code": series_latest['dataset_code']
        }, {"metadata": False})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)
    if is_debug:
        '''debug mode'''
        # Pretty-printed dumps of the three documents, returned as JSON.
        result_provider = render_template_string("{{ provider|pprint|safe }}",
                                                 provider=provider)
        result_dataset = render_template_string("{{ dataset|pprint|safe }}",
                                                dataset=dataset)
        result_series = render_template_string("{{ series|pprint|safe }}",
                                               series=series)
        return current_app.jsonify(
            dict(provider=result_provider,
                 dataset=result_dataset,
                 series=result_series))
    '''Load revisions < current version'''
    revisions = []
    if "version" in series:
        query_revisions = {"slug": slug, "version": {"$lt": series["version"]}}
        count_values = len(series['values'])
        for store in queries.col_series_archives().find(query_revisions).sort(
                'version', DESCENDING):
            series_rev = series_archives_load(store)
            values = series_rev['values']
            # Older revisions may have fewer observations: left-pad with
            # None so every revision aligns with the displayed series.
            empty_element = count_values - len(values)
            values.reverse()
            for i in range(empty_element):
                values.insert(0, None)
            revisions.append({
                "last_update_ds": series_rev['last_update_ds'],
                "version": series_rev['version'],
                "values": values,
                "name": series_rev["name"],
                "url": url_for('.series-by-slug-version',
                               slug=slug,
                               version=series_rev['version'])
            })
    else:
        series["version"] = 0
    if not "last_update_ds" in series:
        # Fall back to the dataset's last update date.
        series["last_update_ds"] = dataset["last_update"]
        series["last_update_widu"] = dataset["last_update"]
    #view_explorer = url_for('.explorer_s', series=slug, _external=True)
    url_provider = url_for('.explorer_p', provider=provider["slug"])
    url_dataset = url_for('.explorer_d', dataset=dataset["slug"])
    url_dataset_direct = url_for('.dataset-by-slug',
                                 slug=dataset["slug"],
                                 _external=True)
    url_series = url_for('.series-by-slug-version',
                         slug=slug,
                         version=series["version"],
                         _external=True)
    url_series_latest = url_for('.series-by-slug-version',
                                slug=slug,
                                version=series_latest["version"])
    url_series_plot = url_for('.ajax_series_plot', slug=slug)
    url_export_csv = url_for('.export-series-csv', slug=slug)
    # Dotted dimension key, e.g. "FREQ.REF_AREA.INDICATOR" values joined.
    dimension_filter = ".".join(
        [series["dimensions"][key] for key in dataset["dimension_keys"]])
    result = render_template(
        "series-unit-modal.html",
        url_provider=url_provider,
        url_dataset=url_dataset,
        url_dataset_direct=url_dataset_direct,
        url_series=url_series,
        url_series_latest=url_series_latest,
        url_series_plot=url_series_plot,
        url_export_csv=url_export_csv,
        series=series,
        is_modal=is_modal,
        provider=provider,
        dataset=dataset,
        is_latest=is_latest,
        revisions=revisions,
        #max_version=max_version,
        #view_explorer=view_explorer,
        dimension_filter=dimension_filter.upper(),
        #is_reverse=is_reverse,
        #obs_attributes_keys=list(set(obs_attributes_keys)),
        #obs_attributes_values=list(set(obs_attributes_values)),
        #revision_dates=list(set(revision_dates)),
        #max_revisions=max_revisions
    )
    return result
def export_series_csv(slug=None):
    """Stream the observations of one or more series as a CSV file.

    ``slug`` is taken from the URL or from the ``slug`` query parameter;
    multiple slugs are separated by ``+``::

        /views/export/series/<slug-a>+<slug-b>

    Aborts 404 when no slug is provided at all.
    """
    slug = slug or request.args.get('slug')
    if not slug:
        abort(404, "slug is required parameter")

    slugs = slug.split("+")
    if len(slugs) > 1:
        query = {'slug': {"$in": slugs}}
    else:
        query = {'slug': slug}

    series_list = [doc for doc in queries.col_series().find(
        query, {"tags": False, "notes": False})]

    fp = StringIO()
    writer = csv.writer(fp, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow(["provider", "dataset_code", "key", "slug",
                     "name", "frequency", "period", "value"])

    for doc in series_list:
        doc.setdefault("version", 0)
        base = (doc["provider_name"], doc["dataset_code"], doc["key"],
                doc["slug"], doc["name"], doc["frequency"])
        # One CSV row per observation.
        writer.writerows(base + (val["period"], val["value"])
                         for val in doc['values'])

    fp.seek(0)
    return send_file_csv(fp, mimetype='text/csv')
def ajax_explorer_datas():
    """Serve the explorer's series rows as JSON.

    Request args: ``provider`` and ``dataset`` slugs narrow the query,
    ``search`` triggers a Mongo ``$text`` search, ``limit`` caps the
    number of rows (default 100, max 1000).
    """
    limit = request.args.get('limit', default=100, type=int)
    if limit > 1000:
        limit = 1000
    # ~7,669,115 bytes of JSON for 1000 series — reason for the 1000 cap.
    projection = {
        "_id": False,
        "dimensions": False,
        "attributes": False,
        "values.revisions": False,
        "notes": False,
        "tags": False
    }
    # NOTE(review): insertion order of this query presumably matters to
    # complex_queries_series() — confirm before replacing the OrderedDict.
    query = OrderedDict()
    provider_slug = request.args.get('provider')
    dataset_slug = request.args.get('dataset')
    #series_slug = request.args.get('series')
    search = request.args.get('search')
    is_eurostat = False
    if dataset_slug:
        dataset = queries.get_dataset(dataset_slug)
        query["provider_name"] = dataset["provider_name"]
        #query["dataset_code"] = dataset["dataset_code"]
        if dataset["provider_name"] == "EUROSTAT":
            is_eurostat = True
    elif provider_slug:
        provider = queries.get_provider(provider_slug)
        query["provider_name"] = provider["name"]
        if provider["name"] == "EUROSTAT":
            is_eurostat = True
    if search:
        # $text search; project the textScore so results can sort on it.
        query["$text"] = {"$search": search.strip()}
        projection['score'] = {'$meta': 'textScore'}
    disabled_datasets = []
    if dataset_slug:
        # dataset_code appended after the optional $text key —
        # NOTE(review): presumably deliberate for query key ordering.
        query["dataset_code"] = dataset["dataset_code"]
    else:
        # Without a dataset filter, remember disabled datasets so their
        # series are dropped from the rows below.
        ds_enabled_query = {"enable": False}
        if "provider_name" in query:
            ds_enabled_query["provider_name"] = query["provider_name"]
        disabled_datasets = [doc["dataset_code"]
                             for doc in queries.col_datasets().find(
                                 ds_enabled_query, {"dataset_code": True})]
    query = complex_queries_series(query)
    cursor = queries.col_series().find(dict(query), projection)
    if search:
        cursor = cursor.sort([('score', {'$meta': 'textScore'})])
    #else:
    #    query = {"slug": series_slug}
    #    cursor = queries.col_series().find(dict(query), projection)
    if limit:
        cursor = cursor.limit(limit)
    if is_eurostat:
        # NOTE(review): count skipped for EUROSTAT, presumably because it
        # is too costly on that provider's volume; total reported as 0.
        count = 0
    else:
        count = cursor.count()
    series_list = [doc for doc in cursor]
    rows = []
    for s in series_list:
        # Drop series belonging to a disabled dataset.
        if disabled_datasets and s["dataset_code"] in disabled_datasets:
            continue
        if not "version" in s:
            s["version"] = 0
        s['start_date'] = s["values"][0]["period"]
        s['end_date'] = s["values"][-1]["period"]
        # Strip observations down to period/value pairs.
        values = [{"period": v["period"], "value": v["value"]}
                  for v in s['values']]
        del s["values"]
        s["values"] = values
        s['view'] = url_for('.series-by-slug', slug=s['slug'], modal=1)
        dataset_slug = slugify("%s-%s" % (s["provider_name"], s["dataset_code"]),
                               word_boundary=False,
                               save_order=True)
        s['view_dataset'] = url_for('.dataset-by-slug',
                                    slug=dataset_slug,
                                    modal=1)
        #s["view_explorer"] = url_for('.explorer_s', series=s['slug'], _external=True)
        s['dataset_slug'] = dataset_slug
        s['export_csv'] = url_for('.export-series-csv', slug=s['slug'])
        s['url_series_plot'] = url_for('.ajax_series_plot', slug=s['slug'])
        s['url_cart_add'] = url_for('.ajax-cart-add', slug=s['slug'])
        #TODO: s['url_dataset'] = url_for('.dataset', id=s['_id'])
        # Readable frequency label when the code is known.
        s['frequency_txt'] = s['frequency']
        if s['frequency'] in constants.FREQUENCIES_DICT:
            s['frequency_txt'] = constants.FREQUENCIES_DICT[s['frequency']]
        rows.append(s)
    return json_tools.json_response(rows, {"total": count})
def series_with_slug(slug, version):
    """Render a series detail/modal page for a specific version.

    In every case:
    - load the latest version of the series
    - load all revisions older than the displayed version

    404 when series/version/provider/dataset is missing; 307 when the
    provider or dataset is disabled.
    """
    is_modal = request.args.get('modal', default=0, type=int)
    is_debug = request.args.get('debug')
    #_version = request.args.get('version', default="latest")
    is_latest = True
    query = {"slug": slug}
    '''Load always latest series from col series'''
    series_latest = queries.col_series().find_one(query)
    if not series_latest:
        abort(404)
    if version >= 0 and version != series_latest['version']:
        # Requested an older version: fetch it from the archive collection.
        query['version'] = version
        store = queries.col_series_archives().find_one(query)
        if not store:
            abort(404)
        series = series_archives_load(store)
        is_latest = False
    else:
        series = series_latest
    provider = queries.col_providers().find_one(
        {"name": series_latest['provider_name']}, {"metadata": False})
    if not provider:
        abort(404)
    if provider["enable"] is False:
        abort(307)
    dataset = queries.col_datasets().find_one(
        {'provider_name': series_latest['provider_name'],
         "dataset_code": series_latest['dataset_code']},
        {"metadata": False})
    if not dataset:
        abort(404)
    if dataset["enable"] is False:
        abort(307)
    if is_debug:
        '''debug mode'''
        # JSON dump of pretty-printed provider/dataset/series documents.
        result_provider = render_template_string("{{ provider|pprint|safe }}",
                                                 provider=provider)
        result_dataset = render_template_string("{{ dataset|pprint|safe }}",
                                                dataset=dataset)
        result_series = render_template_string("{{ series|pprint|safe }}",
                                               series=series)
        return current_app.jsonify(dict(provider=result_provider,
                                        dataset=result_dataset,
                                        series=result_series))
    '''Load revisions < current version'''
    revisions = []
    if "version" in series:
        query_revisions = {"slug": slug, "version": {"$lt": series["version"]}}
        count_values = len(series['values'])
        for store in queries.col_series_archives().find(
                query_revisions).sort('version', DESCENDING):
            series_rev = series_archives_load(store)
            values = series_rev['values']
            # Left-pad shorter revisions with None so their values line up
            # with the displayed series' observations.
            empty_element = count_values - len(values)
            values.reverse()
            for i in range(empty_element):
                values.insert(0, None)
            revisions.append({
                "last_update_ds": series_rev['last_update_ds'],
                "version": series_rev['version'],
                "values": values,
                "name": series_rev["name"],
                "url": url_for('.series-by-slug-version',
                               slug=slug,
                               version=series_rev['version'])})
    else:
        series["version"] = 0
    if not "last_update_ds" in series:
        # Default the series update dates to the dataset's last update.
        series["last_update_ds"] = dataset["last_update"]
        series["last_update_widu"] = dataset["last_update"]
    #view_explorer = url_for('.explorer_s', series=slug, _external=True)
    url_provider = url_for('.explorer_p', provider=provider["slug"])
    url_dataset = url_for('.explorer_d', dataset=dataset["slug"])
    url_dataset_direct = url_for('.dataset-by-slug',
                                 slug=dataset["slug"],
                                 _external=True)
    url_series = url_for('.series-by-slug-version',
                         slug=slug,
                         version=series["version"],
                         _external=True)
    url_series_latest = url_for('.series-by-slug-version',
                                slug=slug,
                                version=series_latest["version"])
    url_series_plot = url_for('.ajax_series_plot', slug=slug)
    url_export_csv = url_for('.export-series-csv', slug=slug)
    # Dimension values joined in the dataset's declared key order.
    dimension_filter = ".".join(
        [series["dimensions"][key] for key in dataset["dimension_keys"]])
    result = render_template(
        "series-unit-modal.html",
        url_provider=url_provider,
        url_dataset=url_dataset,
        url_dataset_direct=url_dataset_direct,
        url_series=url_series,
        url_series_latest=url_series_latest,
        url_series_plot=url_series_plot,
        url_export_csv=url_export_csv,
        series=series,
        is_modal=is_modal,
        provider=provider,
        dataset=dataset,
        is_latest=is_latest,
        revisions=revisions,
        #max_version=max_version,
        #view_explorer=view_explorer,
        dimension_filter=dimension_filter.upper(),
        #is_reverse=is_reverse,
        #obs_attributes_keys=list(set(obs_attributes_keys)),
        #obs_attributes_values=list(set(obs_attributes_values)),
        #revision_dates=list(set(revision_dates)),
        #max_revisions=max_revisions
    )
    return result