def lookup(dataset):
    """Look up a name against a dataset's aliases.

    JSON-only endpoint. Anonymous users are forced into read-only mode so
    they cannot create new alias records via the lookup side effect.

    :param dataset: dataset name (resolved via ``Dataset.find``).
    :returns: a JSON response describing the match; 404 when unmatched,
        418 when the alias has been explicitly marked invalid.
    """
    dataset = Dataset.find(dataset)
    # ``readonly`` comes from the query string; missing/empty means False.
    readonly = validators.StringBool(if_empty=False, if_missing=False)\
        .to_python(request.args.get('readonly'))
    # Anonymous lookups must never write — force read-only.
    readonly = readonly if authz.logged_in() else True
    data = request_content()
    if response_format() != 'json':
        return Response("Not implemented!", status=400)
    try:
        alias = Alias.lookup(dataset, data, request.account,
                             readonly=readonly)
        if alias is None:
            # No alias record exists (read-only mode prevents creation).
            return jsonify({
                'is_matched': False,
                'entity': None,
                'name': data.get('name'),
                'dataset': dataset.name
            }, status=404)
        if isinstance(alias, Entity):
            # The name matched an entity directly, not via an alias.
            return jsonify({
                'is_matched': True,
                'entity': alias,
                'name': data.get('name'),
                'dataset': dataset.name
            }, status=200)
        # Alias.lookup may have created/updated a record; persist it.
        db.session.commit()
        status = 200 if alias.is_matched else 404
        # 418 signals an alias explicitly marked invalid by a reviewer.
        status = 418 if alias.is_invalid else status
        return jsonify(alias, status=status)
    # Py2-only ``except Invalid, inv`` replaced with the ``as`` form,
    # which is valid on Python 2.6+ and Python 3.
    except Invalid as inv:
        return handle_invalid(inv, index, data=data, args=[dataset.name])
def import_upload(dataset_name, id, account_id, entity_col, alias_col):
    """Import a previously uploaded spreadsheet into a dataset.

    For each row, optionally creates/looks up an alias (``alias_col``) and
    an entity (``entity_col``), then links the alias to the entity when
    both were produced for that row.

    :param dataset_name: dataset to import into (via ``Dataset.find``).
    :param id: identifier of the stored upload (passed to ``parse_upload``).
    :param account_id: account performing the import.
    :param entity_col: column holding entity names, or falsy to skip.
    :param alias_col: column holding alias names, or falsy to skip.
    """
    dataset = Dataset.find(dataset_name)
    account = Account.by_id(account_id)
    metadata, row_set = parse_upload(dataset, id)
    headers = detect_headers(row_set)
    for row in row_set:
        data = dict([(c.column, c.value) for c in row])
        entity = data.pop(entity_col) if entity_col else None
        alias = data.pop(alias_col) if alias_col else None
        # Reset per row: previously these names leaked across iterations,
        # so a row with a blank alias either raised NameError (first row)
        # or matched the *previous* row's alias to this row's entity.
        alias_obj = None
        entity_obj = None
        if alias_col and alias is not None and len(alias) and alias != entity:
            d = {'name': alias, 'data': data}
            alias_obj = Alias.lookup(dataset, d, account, match_entity=False)
            # Row data is attached to the alias; the entity gets none.
            data = {}
        if entity_col and entity is not None and len(entity):
            d = {'name': entity, 'data': data}
            entity_obj = Entity.by_name(dataset, entity)
            if entity_obj is None:
                entity_obj = Entity.create(dataset, d, account)
            entity_obj.data = data
        # Link only when this row actually produced both objects.
        if alias_obj is not None and entity_obj is not None:
            alias_obj.match(dataset, {'choice': entity_obj.id}, account)
    db.session.commit()
    flush_cache(dataset)
def import_upload(dataset_name, sig, account_id, entity_col, alias_col):
    """Import a previously uploaded spreadsheet into a dataset.

    For each row, optionally creates/looks up an alias (``alias_col``) and
    an entity (``entity_col``), then links the alias to the entity when
    both were produced for that row.

    :param dataset_name: dataset to import into (via ``Dataset.find``).
    :param sig: signature/identifier of the stored upload
        (passed to ``parse_upload``).
    :param account_id: account performing the import.
    :param entity_col: column holding entity names, or falsy to skip.
    :param alias_col: column holding alias names, or falsy to skip.
    """
    dataset = Dataset.find(dataset_name)
    account = Account.by_id(account_id)
    metadata, row_set = parse_upload(dataset, sig)
    headers = detect_headers(row_set)
    for row in row_set:
        data = dict([(c.column, c.value) for c in row])
        entity = data.pop(entity_col) if entity_col else None
        alias = data.pop(alias_col) if alias_col else None
        # Reset per row: previously these names leaked across iterations,
        # so a row with a blank alias either raised NameError (first row)
        # or matched the *previous* row's alias to this row's entity.
        alias_obj = None
        entity_obj = None
        if alias_col and alias is not None and len(alias) and alias != entity:
            d = {'name': alias, 'data': data}
            alias_obj = Alias.lookup(dataset, d, account, match_entity=False)
            # Row data is attached to the alias; the entity gets none.
            data = {}
        if entity_col and entity is not None and len(entity):
            d = {'name': entity, 'data': data}
            entity_obj = Entity.by_name(dataset, entity)
            if entity_obj is None:
                entity_obj = Entity.create(dataset, d, account)
            entity_obj.data = data
        # Link only when this row actually produced both objects.
        if alias_obj is not None and entity_obj is not None:
            alias_obj.match(dataset, {'choice': entity_obj.id}, account)
    db.session.commit()
    flush_cache()
def lookup(dataset):
    """Look up a name against a dataset's aliases (JSON-only endpoint).

    Anonymous callers are forced into read-only mode so the lookup cannot
    create alias records on their behalf.

    :param dataset: dataset name (resolved via ``Dataset.find``).
    :returns: JSON response; 404 when unmatched, 418 when the alias is
        marked invalid.
    """
    dataset = Dataset.find(dataset)
    # Query-string flag; absent or empty means False.
    readonly = validators.StringBool(if_empty=False, if_missing=False)\
        .to_python(request.args.get('readonly'))
    if not authz.logged_in():
        # Anonymous lookups must never write.
        readonly = True
    data = request_content()
    if response_format() != 'json':
        return Response("Not implemented!", status=400)
    try:
        alias = Alias.lookup(dataset, data, request.account,
                             readonly=readonly)
        if alias is None:
            # No alias record (read-only mode prevents creating one).
            return jsonify(
                {
                    'is_matched': False,
                    'entity': None,
                    'name': data.get('name'),
                    'dataset': dataset.name
                },
                status=404)
        if isinstance(alias, Entity):
            # Direct entity match — no alias record involved.
            return jsonify(
                {
                    'is_matched': True,
                    'entity': alias,
                    'name': data.get('name'),
                    'dataset': dataset.name
                },
                status=200)
        # Lookup may have created/updated an alias record; persist it.
        db.session.commit()
        if alias.is_invalid:
            # 418 signals an alias explicitly marked invalid.
            status = 418
        else:
            status = 200 if alias.is_matched else 404
        return jsonify(alias, status=status)
    # Py2-only ``except Invalid, inv`` replaced with the ``as`` form,
    # valid on Python 2.6+ and Python 3.
    except Invalid as inv:
        return handle_invalid(inv, index, data=data, args=[dataset.name])