def collections(request):
    """Return the current user's Zotero collections as JSON; AJAX requests only."""
    if not request.is_ajax():
        return HttpResponseBadRequest()
    loc = request.GET.get('loc', '')
    top_level = request.GET.get('top', 0)
    zotero_key = request.user.get_profile().zotero_key
    collections = utils.get_collections(zotero_key, loc, int(top_level))
    return HttpResponse(json.dumps(collections), mimetype='application/json')
def __init__(self):
    self.uri = "https://w3id.org/dggs/tb16pix"
    self.label = "Testbed 16 Pix Discrete Global Grid"
    self.description = """This is an instance of the [Open Geospatial Consortium (OGC)](https://www.ogc.org/)'s
"[OGC API - Features](http://www.opengis.net/doc/IS/ogcapi-features-1/1.0)" API that delivers the authoritative
content for *Testbed 16 Pix* (TB16Pix), which is a [Discrete Global Grid](http://docs.opengeospatial.org/as/15-104r5/15-104r5.html#4),
that is, a multi-layered, tessellated set of spatial grid cells used for position identification on the Earth's
surface. This API and the data within it have been created for the OGC's Testbed 16, which is a multi-organisation
interoperability experiment."""
    self.parts = get_collections()
    self.distributions = [
        (URI_BASE_DATASET.sparql, "SPARQL"),
        (URI_BASE_DATASET, "Linked Data API"),
    ]
def collections():
    collections = get_collections()
    return ContainerRenderer(
        request,
        "https://w3id.org/dggs/tb16pix/grid/",
        "Collections",
        "DGGs are made of hierarchical layers of Cell geometries. In TB16Pix, these layers are called Grids. "
        "Additionally, this API delivers TB16Pix Zones in Collections too.",
        "https://w3id.org/dggs/tb16pix",
        "TB16Pix Dataset",
        collections,
        len(collections)
    ).render()
def gen_config(sum_type, config_filename, peer_folder, sums=None):
    """Given a summary type string, e.g., 'topicwords', and a list of
    (summary, [comparisons]) tuples, generate the appropriate config."""
    sums = sums or [(i, models) for i, _, models, _ in get_collections(False)]
    with open(config_filename, 'w') as f:
        f.write('<ROUGE-EVAL version="1.0">\n')
        for i, comparisons in sums:
            f.write('<EVAL ID="%d">\n' % (i + 1))
            f.write('<PEER-ROOT>%s</PEER-ROOT>\n' % peer_folder)
            f.write('<MODEL-ROOT>models</MODEL-ROOT>\n')
            f.write('<INPUT-FORMAT TYPE="SPL"></INPUT-FORMAT>\n')
            f.write('<PEERS><P ID="%s">summary%02d.txt</P></PEERS>\n' % (sum_type, i))
            f.write('<MODELS>\n')
            f.write('\n'.join(['<M ID="S%s">%s</M>' % (j, summ)
                               for j, summ in enumerate(comparisons)]))
            f.write('\n</MODELS>\n')
            f.write('</EVAL>\n')
        f.write('</ROUGE-EVAL>')
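# Usage sketch for gen_config (the file names below are hypothetical examples,
# not from the original project): each entry of `sums` pairs a zero-based
# document index with the model summary files (resolved against the hard-coded
# MODEL-ROOT "models") that the peer file summary%02d.txt in peer_folder is
# scored against.
example_sums = [
    (0, ['doc00.model.a.txt', 'doc00.model.b.txt']),
    (1, ['doc01.model.a.txt']),
]
gen_config('topicwords', 'rouge_settings.xml', 'peers/topicwords', sums=example_sums)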
def _calculate_sales(match_condition, aggregate_date_type, use_test_db=False):
    """Calculate the sales total for the records matching match_condition."""
    collections = get_collections(use_test_db)
    order_coll = collections[settings.ORDER_COLL]
    aggregate_coll = collections[settings.AGGREGATE_COLL]

    if aggregate_date_type == settings.DATE_TYPE_MINUTELY:
        # Minutely totals are computed directly from the raw orders.
        result = list(
            order_coll.aggregate([{
                '$match': match_condition
            }, {
                '$group': {
                    '_id': None,
                    settings.SALES: {
                        '$sum': '${}'.format(settings.PRICE)
                    }
                }
            }]))
        time_key = settings.CREATED_TIME
    else:
        # Non-minutely totals are rolled up from existing aggregate documents.
        result = list(
            aggregate_coll.aggregate([{
                '$match': match_condition
            }, {
                '$group': {
                    '_id': None,
                    settings.SALES: {
                        '$sum': '${}'.format(settings.PRICE)
                    }
                }
            }]))
        time_key = settings.TIME_START

    # If there are no matching records, the sales total is 0.
    sales = 0 if not result else result[0][settings.SALES]

    aggregate_coll.update_one(
        {
            settings.DATE_TYPE: aggregate_date_type,
            time_key: match_condition[time_key]['$gte'],
        },
        {'$set': {
            settings.SALES: sales
        }},
        upsert=True)
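# Usage sketch for _calculate_sales (the exact window shape is an assumption,
# not taken from the original project): a minutely aggregation matches orders
# whose settings.CREATED_TIME falls inside one minute, and the function reads
# the '$gte' bound back out to timestamp the upserted aggregate document.
import datetime

minute_start = datetime.datetime(2020, 1, 1, 12, 30)
minutely_condition = {
    settings.CREATED_TIME: {
        '$gte': minute_start,
        '$lt': minute_start + datetime.timedelta(minutes=1),
    }
}
_calculate_sales(minutely_condition, settings.DATE_TYPE_MINUTELY, use_test_db=True)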
def create_order(use_test_db=False, created_time=None, date_type=None):
    """Create an order with a price of 1, created at the current time
    (or at ``created_time`` if given)."""
    collections = get_collections(use_test_db)
    order_coll = collections[settings.ORDER_COLL]
    aggregate_coll = collections[settings.AGGREGATE_COLL]

    if date_type:
        # When a date_type is given, seed an aggregate document instead of an order.
        aggregate_coll.insert_one({
            settings.TIME_START: created_time or datetime.datetime.utcnow(),
            settings.PRICE: 1,
            settings.DATE_TYPE: date_type
        })
    else:
        order_coll.insert_one({
            settings.CREATED_TIME: created_time or datetime.datetime.utcnow(),
            settings.PRICE: 1,
        })
def export_all():
    """Exports all collections to DATA_PATH/export folder"""
    db = utils.get_db()
    collection_names = utils.get_collections(db)
    status = True
    failed_writes = []
    for cname in collection_names:
        try:
            collection = db[cname]
            data = collection.find()
            write_status = utils.write_json(cname, data)
            if write_status:
                logging.info("Wrote %s to %s.json", cname, cname)
            else:
                failed_writes.append(cname)
        except Exception:
            logging.error("export_all(): Export failed!")
            traceback.print_exc()
            status = False
    if len(failed_writes) > 0:
        print("Failed to write", failed_writes)
        status = False
    return status
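# Usage sketch: export_all() returns False when any collection fails to export,
# so a caller (this __main__ guard is hypothetical, not part of the original
# module) can surface that as a non-zero exit code.
if __name__ == '__main__':
    import sys
    sys.exit(0 if export_all() else 1)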