def datasets_series_list(dataset):
    """Return every series belonging to one enabled dataset.

    The dataset is looked up by slug; a missing or disabled dataset
    yields a 404. The optional ``limit`` query argument caps the number
    of series returned (default 1000; 0 disables the cap).
    """
    dataset_doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "values": False})
    if not dataset_doc:
        abort(404)

    # Series documents are keyed by (provider_name, dataset_code), not slug.
    series_query = queries.complex_queries_series({
        'provider_name': dataset_doc['provider_name'],
        'dataset_code': dataset_doc["dataset_code"],
    })
    cursor = queries.col_series().find(series_query,
                                       {"_id": False, "values": False})

    max_docs = request.args.get('limit', default=1000, type=int)
    if max_docs:
        cursor = cursor.limit(max_docs)

    return json_tools.json_response(list(cursor))
def datasets_series_list(dataset):
    """Return one page of series for an enabled dataset.

    Pagination is driven by the ``page`` (default 1) and ``per_page``
    (default 20, capped at 100) query arguments; the response carries a
    ``meta`` section describing the page window. Aborts with 404 when
    the dataset slug is unknown or disabled.
    """
    query = {'enable': True, 'slug': dataset}
    projection = {"_id": False, "values": False}
    dataset_doc = queries.col_datasets().find_one(query, projection)
    if not dataset_doc:
        abort(404)

    # Series documents are keyed by provider/dataset code, not by slug.
    query = {
        'provider_name': dataset_doc['provider_name'],
        'dataset_code': dataset_doc["dataset_code"]
    }
    projection = {"_id": False, "values": False}
    query = queries.complex_queries_series(query)
    docs = queries.col_series().find(query, projection)

    page = request.args.get('page', default=1, type=int)
    per_page = request.args.get('per_page', default=20, type=int)
    # Guard against non-positive as well as oversized client values:
    # the original only clamped the upper bound, letting page=0 or
    # per_page=-5 reach Pagination unchecked.
    page = max(page, 1)
    per_page = min(max(per_page, 1), 100)

    pagination = queries.Pagination(docs, page, per_page)
    meta = {
        "page": pagination.page,
        "pages": pagination.pages,
        "per_page": pagination.per_page,
        "total": pagination.total,
    }
    return json_tools.json_response(list(pagination.items), meta=meta)
def series_values_view(slug):
    """Return the (period, value) observations of a single series.

    Responds 404 when the series slug is unknown or when its parent
    dataset is not enabled.
    """
    series_fields = {
        "_id": False,
        "key": True,
        "slug": True,
        "values.value": True,
        "values.period": True,
        "provider_name": True,
        'dataset_code': True
    }
    doc = queries.col_series().find_one({'slug': slug}, series_fields)
    if not doc:
        abort(404)

    # Only serve series whose parent dataset is still enabled.
    dataset_doc = queries.col_datasets().find_one(
        {
            'enable': True,
            "provider_name": doc["provider_name"],
            'dataset_code': doc["dataset_code"]
        },
        {"_id": False, "enable": True})
    if not dataset_doc:
        abort(404)

    return json_response(json_util.dumps(doc, default=json_util.default))
def datasets_series_list(dataset):
    """Return all series of an enabled dataset, capped by ``?limit``.

    The ``limit`` query argument defaults to 1000; passing 0 removes
    the cap entirely. Unknown or disabled dataset slugs produce a 404.
    """
    dataset_doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "values": False},
    )
    if not dataset_doc:
        abort(404)

    base_query = {
        'provider_name': dataset_doc['provider_name'],
        'dataset_code': dataset_doc["dataset_code"],
    }
    # Let the shared query builder fold in any extra request filters.
    series_query = queries.complex_queries_series(base_query)

    cap = request.args.get('limit', default=1000, type=int)
    cursor = queries.col_series().find(series_query,
                                       {"_id": False, "values": False})
    if cap:
        cursor = cursor.limit(cap)

    return json_tools.json_response([row for row in cursor])
def datasets_series_list(dataset):
    """Return one page of series for an enabled dataset.

    ``page`` (default 1) and ``per_page`` (default 20, capped at 100)
    query arguments drive the pagination; the response carries a meta
    section describing the page window.
    """
    dataset_doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "values": False})
    if not dataset_doc:
        abort(404)

    series_query = queries.complex_queries_series({
        'provider_name': dataset_doc['provider_name'],
        'dataset_code': dataset_doc["dataset_code"],
    })
    cursor = queries.col_series().find(series_query,
                                       {"_id": False, "values": False})

    page = request.args.get('page', default=1, type=int)
    per_page = request.args.get('per_page', default=20, type=int)
    per_page = min(per_page, 100)  # hard ceiling on page size

    pagination = queries.Pagination(cursor, page, per_page)
    meta = {
        "page": pagination.page,
        "pages": pagination.pages,
        "per_page": pagination.per_page,
        "total": pagination.total,
    }
    return json_tools.json_response(list(pagination.items), meta=meta)
def dataset_series_list_values(dataset):
    """Return paginated series (with observations) for one dataset.

    Each item carries series metadata plus its (period, value) pairs;
    paging is controlled by ``page`` (default 1) and ``per_page``
    (default 50, capped at 1000). 404 for an unknown or disabled
    dataset slug.
    """
    doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "provider_name": True, "dataset_code": True})
    if not doc:
        abort(404)

    series_query = queries.complex_queries_series({
        "provider_name": doc["provider_name"],
        'dataset_code': doc["dataset_code"],
    })
    fields = {
        "_id": False,
        "key": True,
        "slug": True,
        "name": True,
        "frequency": True,
        "start_date": True,
        "end_date": True,
        "dimensions": True,
        "attributes": True,
        "values.value": True,
        "values.period": True,
    }
    cursor = queries.col_series().find(series_query, fields)

    page = request.args.get('page', default=1, type=int)
    per_page = request.args.get('per_page', default=50, type=int)
    if per_page > 1000:
        per_page = 1000

    pagination = queries.Pagination(cursor, page, per_page)
    meta = {
        "page": pagination.page,
        "pages": pagination.pages,
        "per_page": pagination.per_page,
        "total": pagination.total,
    }
    return json_tools.json_response_async(list(pagination.items), meta=meta)
def series_list_view(dataset):
    """Render all series documents of an enabled dataset as JSON."""
    dataset_doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "values": False, "tags": False})
    if not dataset_doc:
        abort(404)

    cursor = queries.col_series().find(
        {'provider_name': dataset_doc['provider_name'],
         'dataset_code': dataset_doc["dataset_code"]},
        {"_id": False, "tags": False, "values": False})
    # json_util handles BSON-specific types (dates, ObjectId, ...).
    return json_response(json_util.dumps(cursor, default=json_util.default))
def dataset_values_view(slug):
    """Stream the observations of every series in one enabled dataset.

    Extra query arguments are interpreted as dimension filters (regex
    match on their first value); ``frequency`` filters on the frequency
    field and ``limit`` caps the number of series returned (0 = no cap).
    The response is a streamed JSON array so large datasets are not
    buffered in memory. 404 for an unknown or disabled dataset slug.
    """
    query = {'enable': True, 'slug': slug}
    projection = {"_id": False, "provider_name": True, "dataset_code": True}
    doc = queries.col_datasets().find_one(query, projection)
    if not doc:
        abort(404)

    #TODO: rendre obligatoire frequency
    query = {
        "provider_name": doc["provider_name"],
        'dataset_code': doc["dataset_code"]
    }
    projection = {
        "_id": False,
        "key": True,
        "slug": True,
        "frequency": True,
        "values.value": True,
        "values.period": True,
    }

    #TODO: multiple value in dimension
    limit = request.args.get('limit', default=0, type=int)
    for name, values in request.args.lists():
        if name == 'limit':
            continue  # consumed above
        if name == 'frequency':
            query['frequency'] = values[0]
        else:
            #TODO: case regex
            query['dimensions.' + name] = {'$regex': values[0]}

    #/api/v1/dataset/bis-pp-ls/values?Reference%20area=FR&Reference%20area=AU
    #query : {'provider_name': 'BIS', 'dataset_code': 'PP-LS', 'dimensions.Reference area': {'$regex': 'FR'}}
    docs = queries.col_series().find(query, projection).limit(limit)

    #TODO: Period + Value
    # Index of the last element, used to suppress the trailing comma.
    count = docs.count() - 1
    # Fixed: a stray print() was left in this view; route the trace
    # through the application logger like the other views in this file.
    current_app.logger.info("dataset-values - query[%s] - result[%s]",
                            query, count)

    def generate():
        # Emit the JSON array element by element to keep memory flat.
        yield "["
        for i, row in enumerate(docs):
            yield json_util.dumps(row, default=json_util.default)
            if i < count:
                yield ","
        yield "]"

    return app.response_class(generate(), mimetype='application/json')
def dataset_unit_frequencies(dataset):
    """Return the distinct series frequencies of one enabled dataset."""
    doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "enable": False, "lock": False, "tags": False})
    if not doc:
        abort(404)

    series_filter = {
        "provider_name": doc["provider_name"],
        "dataset_code": doc["dataset_code"],
    }
    frequencies = queries.col_series().distinct("frequency",
                                                filter=series_filter)
    return json_tools.json_response(frequencies)
def series_list_view(dataset):
    """Dump every series document of one enabled dataset as JSON."""
    ds = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "values": False, "tags": False})
    if not ds:
        abort(404)

    series_query = {
        'provider_name': ds['provider_name'],
        'dataset_code': ds["dataset_code"]
    }
    cursor = queries.col_series().find(
        series_query, {"_id": False, "tags": False, "values": False})
    return json_response(json_util.dumps(cursor, default=json_util.default))
def dataset_unit_frequencies(dataset):
    """List the distinct frequency codes found in a dataset's series."""
    dataset_doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "enable": False, "lock": False, "tags": False})
    if not dataset_doc:
        abort(404)

    result = queries.col_series().distinct(
        "frequency",
        filter={"provider_name": dataset_doc["provider_name"],
                "dataset_code": dataset_doc["dataset_code"]})
    return json_tools.json_response(result)
def dataset_values_view(slug):
    """Stream every series' observations for one enabled dataset.

    Extra query args act as dimension filters (regex on first value),
    ``frequency`` filters on the frequency field, and ``limit`` caps
    the number of series (0 = no cap). Responds with a streamed JSON
    array.
    """
    doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': slug},
        {"_id": False, "provider_name": True, "dataset_code": True})
    if not doc:
        abort(404)

    #TODO: rendre obligatoire frequency
    query = {
        "provider_name": doc["provider_name"],
        'dataset_code': doc["dataset_code"],
    }
    fields = {
        "_id": False,
        "key": True,
        "slug": True,
        "frequency": True,
        "values.value": True,
        "values.period": True,
    }

    #TODO: multiple value in dimension
    limit = request.args.get('limit', default=0, type=int)
    for arg_name, arg_values in request.args.lists():
        if arg_name == 'limit':
            continue
        if arg_name == 'frequency':
            query['frequency'] = arg_values[0]
        else:
            #TODO: case regex
            query['dimensions.' + arg_name] = {'$regex': arg_values[0]}

    #/api/v1/dataset/bis-pp-ls/values?Reference%20area=FR&Reference%20area=AU
    #query : {'provider_name': 'BIS', 'dataset_code': 'PP-LS', 'dimensions.Reference area': {'$regex': 'FR'}}
    docs = queries.col_series().find(query, fields).limit(limit)

    #TODO: Period + Value
    count = docs.count() - 1
    print("dataset-values - query[%s] - result[%s]" % (query, count))

    def generate():
        # Stream a JSON array, comma-separating all but the last element.
        yield "["
        for i, row in enumerate(docs):
            yield json_util.dumps(row, default=json_util.default)
            if i < count:
                yield ","
        yield "]"

    return app.response_class(generate(), mimetype='application/json')
def series_unit(series):
    """Return a single series document; 404 unless its dataset is enabled."""
    doc = queries.col_series().find_one({'slug': series}, {"_id": False})
    if not doc:
        abort(404)

    parent = queries.col_datasets().find_one(
        {'enable': True,
         "provider_name": doc["provider_name"],
         'dataset_code': doc["dataset_code"]},
        {"_id": False, "enable": True})
    if not parent:
        abort(404)

    return json_tools.json_response(doc)
def series_view(slug):
    """Return one series document (tags stripped) as JSON."""
    doc = queries.col_series().find_one({'slug': slug},
                                        {"_id": False, "tags": False})
    if not doc:
        abort(404)

    # Refuse to serve a series whose owning dataset is disabled.
    enabled = queries.col_datasets().find_one(
        {'enable': True,
         "provider_name": doc["provider_name"],
         'dataset_code': doc["dataset_code"]},
        {"_id": False, "enable": True})
    if not enabled:
        abort(404)

    return json_response(json_util.dumps(doc, default=json_util.default))
def series_unit(series):
    """Return the full document of one series.

    404 when the slug is unknown or the owning dataset is disabled.
    """
    series_doc = queries.col_series().find_one({'slug': series},
                                               {"_id": False})
    if not series_doc:
        abort(404)

    dataset_query = {
        'enable': True,
        "provider_name": series_doc["provider_name"],
        'dataset_code': series_doc["dataset_code"],
    }
    if not queries.col_datasets().find_one(dataset_query,
                                           {"_id": False, "enable": True}):
        abort(404)

    return json_tools.json_response(series_doc)
def dataset_series_list_values(dataset):
    """Return paginated series with their observations for one dataset."""
    doc = queries.col_datasets().find_one(
        {'enable': True, 'slug': dataset},
        {"_id": False, "provider_name": True, "dataset_code": True})
    if not doc:
        abort(404)

    projection = {
        "_id": False,
        "key": True,
        "slug": True,
        "name": True,
        "frequency": True,
        "start_date": True,
        "end_date": True,
        "dimensions": True,
        "attributes": True,
        "values.value": True,
        "values.period": True,
    }
    query = queries.complex_queries_series({
        "provider_name": doc["provider_name"],
        'dataset_code': doc["dataset_code"],
    })
    docs = queries.col_series().find(query, projection)

    page = request.args.get('page', default=1, type=int)
    # per_page defaults to 50 and is capped at 1000.
    per_page = min(request.args.get('per_page', default=50, type=int), 1000)

    pagination = queries.Pagination(docs, page, per_page)
    meta = {
        "page": pagination.page,
        "pages": pagination.pages,
        "per_page": pagination.per_page,
        "total": pagination.total,
    }
    return json_tools.json_response_async([row for row in pagination.items],
                                          meta=meta)
def _EVIEWS_dataset_values(provider_name=None, dataset_code=None):
    """Render the EViews export page ("eviews.html") for one dataset.

    Query args: ``limit`` caps the number of series (0 = no cap),
    ``frequency`` filters on the frequency field, and any other arg is
    treated as a dimension filter (regex match on its first value).
    """
    #TODO: use dataset slug
    #TODO: dataset enable
    #TODO: required frequency
    query = {}
    query['provider_name'] = provider_name
    query['dataset_code'] = dataset_code
    limit = request.args.get('limit', default=0, type=int)
    for r in request.args.lists():
        if r[0] == 'limit':
            pass  # already consumed above
        elif r[0] == 'frequency':
            query['frequency'] = r[1][0]
        else:
            # Any other query arg becomes a dimension filter (regex on
            # the first supplied value).
            query['dimensions.'+r[0]] = {'$regex': r[1][0]}
    start = time.time()
    cursor = queries.col_series().find(query).limit(limit)
    if cursor.count() == 0:
        dates, series_keys, series_names, values = [], [], [], []
    else:
        # NOTE(review): this calls dataset_series_list(cursor); elsewhere in
        # this module the table-building helper is named _dataset_series_list
        # and the similarly named route takes a dataset slug, not a cursor.
        # Confirm the intended helper.
        dates, series_keys, series_names, values = dataset_series_list(cursor)
    context = {
        "dates": dates,
        "series_keys": series_keys,
        "series_names": series_names,
        "values": values
    }
    end = time.time() - start
    msg = "eviews-series - provider[%s] - dataset[%s] : %.3f"
    current_app.logger.info(msg % (provider_name, dataset_code, end))
    return render_template("eviews.html", **context)
def series_view(slug):
    """Return one series with its values reduced to the raw value list.

    The full ``values`` entries are replaced by just their ``value``
    fields, and the document is wrapped in a one-element JSON array.
    """
    doc = queries.col_series().find_one({'slug': slug},
                                        {"_id": False, "tags": False})
    if not doc:
        abort(404)

    dataset_doc = queries.col_datasets().find_one(
        {'enable': True,
         "provider_name": doc["provider_name"],
         'dataset_code': doc["dataset_code"]},
        {"_id": False, "enable": True})
    if not dataset_doc:
        abort(404)

    # Collapse each observation dict down to its bare value.
    doc["values"] = [entry["value"] for entry in doc["values"]]
    return json_response(json_util.dumps([doc], default=json_util.default))
def series_multi(series):
    """Return several series at once; slugs are '+'-joined in the URL."""
    slugs = series.split("+")
    cursor = queries.col_series().find({'slug': {"$in": slugs}},
                                       {"_id": False})
    return json_tools.json_response(list(cursor))
def _dataset_values(provider_name=None, dataset_code=None, frequency=None,
                    separator='dot'):
    """Render an HTML/spreadsheet values table for one dataset+frequency.

    Query args:
      * ``format`` -- 'html' (default) or one of the spreadsheet formats
        in EXTENSIONS_MAP; spreadsheet formats are served as attachments.
      * ``limit`` -- cap on the number of series (0 = no cap); without a
        limit the result count must stay under WIDUKIND_DISPLAY_LIMIT.
      * ``separator`` -- decimal separator, 'dot' or 'comma'.
    Remaining args are folded into the Mongo query by
    queries.complex_queries_series().

    Aborts with 400 on a bad separator, an oversized unlimited result
    set, or an empty result set.
    """
    query = {}
    query['provider_name'] = provider_name
    query['dataset_code'] = dataset_code

    _format = request.args.get('format', default='html')
    limit = request.args.get('limit', default=0, type=int)
    query['frequency'] = frequency

    separator = request.args.get('separator', default=separator)
    # Idiomatic 'not in' (was 'not separator in ...').
    if separator not in ['dot', 'comma']:
        abort(400, "separator [%s] not supported. valid separator[dot, comma]" % separator)

    query = queries.complex_queries_series(
        query,
        search_attributes=False,
        bypass_args=['limit', 'tags', 'provider', 'dataset', 'frequency',
                     'separator', 'format'])

    start = time.time()
    cursor = queries.col_series().find(query)

    # Hoisted: count() ignores limit() by default, so a single server
    # round-trip serves both the ceiling check and the emptiness check.
    total = cursor.count()
    max_limit = current_app.config.get("WIDUKIND_DISPLAY_LIMIT", 1000)
    if limit:
        cursor = cursor.limit(limit)
    elif total > max_limit:
        abort(400, "The number of result exceeds the allowed limit [%s]. You must use the limit parameter in the query." % max_limit)

    if total == 0:
        # (removed a dead tuple assignment that preceded this abort)
        abort(400, "no data found")
    dates, series_keys, series_names, values = _dataset_series_list(
        cursor, frequency, separator=separator)

    context = {
        "dates": dates,
        "series_keys": series_keys,
        "series_names": series_names,
        "values": values
    }

    end = time.time() - start
    msg = "eviews-series - provider[%s] - dataset[%s] - frequency[%s] - limit[%s] - duration[%.3f]"
    current_app.logger.info(msg % (provider_name, dataset_code, frequency,
                                   limit, end))

    response_str = render_template("html/values.html", **context)

    #TODO: use lang browser pour choix separator ?
    #TODO: header lang ?

    EXTENSIONS_MAP = {
        "excel": ("xls", "application/vnd.ms-excel"),
        "xls": ("xls", "application/vnd.ms-excel"),
        "xlsx": ("xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "calc": ("ods", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
    }

    if _format and _format in EXTENSIONS_MAP:
        response = make_response(response_str)
        _ext, content_type = EXTENSIONS_MAP[_format]
        filename = generate_filename(provider_name=provider_name,
                                     dataset_code=dataset_code,
                                     #key=key,
                                     #slug=slug,
                                     prefix="series-list")
        filename = "%s.%s" % (filename, _ext)
        response.headers['Content-Type'] = content_type
        response.headers["Content-disposition"] = "attachment; filename=%s" % filename
        #response.content_length = fileobj.length
        #TODO: response.last_modified = fileobj.upload_date
        #TODO: response.set_etag(fileobj.md5)
        response.make_conditional(request)
        return response
    return response_str
def _dataset_values(provider_name=None, dataset_code=None, frequency=None, separator='dot'):
    """Render an HTML (or spreadsheet attachment) values table for one dataset.

    Query args: ``format`` ('html' default, or a key of EXTENSIONS_MAP),
    ``limit`` (0 = no cap; otherwise results must stay under
    WIDUKIND_DISPLAY_LIMIT), ``separator`` ('dot' or 'comma'); remaining
    args are handled by queries.complex_queries_series(). Aborts 400 on
    bad separator, oversized unlimited result, or empty result.
    """
    query = {}
    query['provider_name'] = provider_name
    query['dataset_code'] = dataset_code
    _format = request.args.get('format', default='html')
    limit = request.args.get('limit', default=0, type=int)
    query['frequency'] = frequency
    separator = request.args.get('separator', default=separator)
    if not separator in ['dot', 'comma']:
        abort(400, "separator [%s] not supported. valid separator[dot, comma]" % separator)
    # Remaining request args become series filters; the bypass_args are
    # control parameters skipped by the query builder.
    query = queries.complex_queries_series(query,
                                           search_attributes=False,
                                           bypass_args=['limit', 'tags',
                                                        'provider',
                                                        'dataset',
                                                        'frequency',
                                                        'separator',
                                                        'format'])
    start = time.time()
    cursor = queries.col_series().find(query)
    max_limit = current_app.config.get("WIDUKIND_DISPLAY_LIMIT", 1000)
    if limit:
        cursor = cursor.limit(limit)
    else:
        # Without an explicit limit, refuse result sets larger than the
        # configured display ceiling.
        if cursor.count() > max_limit:
            abort(400, "The number of result exceeds the allowed limit [%s]. You must use the limit parameter in the query." % max_limit)
    if cursor.count() == 0:
        dates, series_keys, series_names, values = [], [], [], []  # dead: abort() below raises
        abort(400, "no data found")
    else:
        dates, series_keys, series_names, values = _dataset_series_list(cursor, frequency, separator=separator)
    context = {
        "dates": dates,
        "series_keys": series_keys,
        "series_names": series_names,
        "values": values
    }
    end = time.time() - start
    msg = "eviews-series - provider[%s] - dataset[%s] - frequency[%s] - limit[%s] - duration[%.3f]"
    current_app.logger.info(msg % (provider_name, dataset_code, frequency, limit, end))
    response_str = render_template("html/values.html", **context)
    """ TODO: use lang browser pour choix separator ? 
    TODO: header lang ? 
    """
    EXTENSIONS_MAP = {
        "excel": ("xls", "application/vnd.ms-excel"),
        "xls": ("xls", "application/vnd.ms-excel"),
        "xlsx": ("xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
        "calc": ("ods", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"),
    }
    if _format and _format in EXTENSIONS_MAP:
        response = make_response(response_str)
        _ext = EXTENSIONS_MAP[_format][0]
        content_type = EXTENSIONS_MAP[_format][1]
        filename = generate_filename(provider_name=provider_name,
                                     dataset_code=dataset_code,
                                     #key=key,
                                     #slug=slug,
                                     prefix="series-list")
        filename = "%s.%s" % (filename, _ext)
        response.headers['Content-Type'] = content_type
        response.headers["Content-disposition"] = "attachment; filename=%s" % filename
        #response.content_length = fileobj.length
        #TODO: response.last_modified = fileobj.upload_date
        #TODO: response.set_etag(fileobj.md5)
        response.make_conditional(request)
        return response
    return response_str