Ejemplo n.º 1
0
def suggest(dataset):
    """
    Suggest API, emulates Google Refine API. See:
    http://code.google.com/p/google-refine/wiki/SuggestApi

    Looks up entities in the named dataset whose name begins with the
    ``prefix`` request argument and returns them as a Refine-style
    JSON response. Supports ``start`` (offset) and a limit defaulting
    to 20 results.
    """
    dataset = Dataset.by_name(dataset)
    entities = Entity.all().filter(Entity.invalid != True)
    query = request.args.get('prefix', '').strip()
    # Escape SQL LIKE wildcards in the user-supplied prefix so that
    # '%' and '_' match literally instead of acting as wildcards
    # (previously the raw prefix was interpolated into the pattern).
    escaped = query.replace('\\', '\\\\') \
                   .replace('%', '\\%') \
                   .replace('_', '\\_')
    entities = entities.filter(Entity.name.ilike(escaped + '%', escape='\\'))
    entities = entities.offset(get_offset(field='start'))
    entities = entities.limit(get_limit(default=20))

    matches = [{
        'name': entity.name,
        'n:type': {
            'id': '/' + dataset.name,
            'name': dataset.label
            },
        'id': entity.id
        } for entity in entities]
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": query,
        "result": matches
        })
Ejemplo n.º 2
0
def reconcile(dataset=None):
    """
    Reconciliation API, emulates Google Refine API. See: 
    http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi
    """
    if dataset is not None:
        dataset = Dataset.by_name(dataset)

    # TODO: Add proper support for types and namespacing.
    params = request.args.copy()
    params.update(request.form.copy())

    if 'query' in params:
        # Single-query mode: the query is either a JSON object or a
        # bare string, in which case the whole parameter dict is used.
        query = params.get('query')
        if not query.startswith('{'):
            query = params
        else:
            try:
                query = json.loads(query)
            except ValueError:
                raise BadRequest()
        return jsonify(reconcile_op(dataset, query))

    if 'queries' in params:
        # Batch mode: several queries keyed by arbitrary identifiers.
        try:
            batch = json.loads(params.get('queries'))
        except ValueError:
            raise BadRequest()
        results = {key: reconcile_op(dataset, query)
                   for key, query in batch.items()}
        return jsonify(results)

    # No query submitted at all: return the service metadata.
    return reconcile_index(dataset)
Ejemplo n.º 3
0
def reconcile(dataset):
    """
    Reconciliation API, emulates Google Refine API. See: 
    http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi
    """
    dataset = Dataset.by_name(dataset)

    # TODO: Add proper support for types and namespacing.
    args = request.args.copy()
    args.update(request.form.copy())

    if 'query' in args:
        # A single reconciliation query, either JSON or a plain string.
        raw = args.get('query')
        if raw.startswith('{'):
            try:
                parsed = json.loads(raw)
            except ValueError:
                raise BadRequest()
        else:
            # Plain-string form: pass the merged request parameters.
            parsed = args
        return jsonify(reconcile_op(dataset, parsed))
    elif 'queries' in args:
        # Several queries submitted together as one JSON object.
        try:
            batch = json.loads(args.get('queries'))
        except ValueError:
            raise BadRequest()
        out = {}
        for key in batch:
            out[key] = reconcile_op(dataset, batch[key])
        return jsonify(out)
    else:
        # Neither form was given: respond with the service metadata.
        return reconcile_index(dataset)
Ejemplo n.º 4
0
def type_to_dataset(type_name):
    """Resolve a Refine type name (e.g. ``/mydataset``) to its Dataset.

    Raises BadRequest when the cleaned name matches no known dataset.
    """
    ds = Dataset.by_name(type_name.strip().strip('/'))
    if ds is None:
        raise BadRequest('No type (or invalid type) specified!')
    return ds
Ejemplo n.º 5
0
def type_to_dataset(type_name):
    """Map a type identifier onto a Dataset, or raise BadRequest."""
    cleaned = type_name.strip().strip('/')
    dataset = Dataset.by_name(cleaned)
    if dataset is not None:
        return dataset
    raise BadRequest('No type (or invalid type) specified!')
Ejemplo n.º 6
0
 def _to_python(self, value, state):
     """Validate that *value* names an existing dataset; return it."""
     found = Dataset.by_name(value)
     if found is None:
         raise Invalid('Dataset not found.', value, None)
     return found
Ejemplo n.º 7
0
 def _to_python(self, value, state):
     """Validate that no dataset named *value* exists yet; return the name."""
     if Dataset.by_name(value) is not None:
         raise Invalid('Dataset already exists.', value, None)
     return value
Ejemplo n.º 8
0
def flush(dataset):
    """Delete every unmatched alias of the named dataset and commit."""
    ds = Dataset.by_name(dataset)
    stale_aliases = Alias.all_unmatched(ds)
    for stale in stale_aliases:
        db.session.delete(stale)
    db.session.commit()
Ejemplo n.º 9
0
def flush(dataset):
    """Remove all unmatched aliases belonging to *dataset*, then commit."""
    logger.info('Flushing')
    target = Dataset.by_name(dataset)
    for stale in Alias.all_unmatched(target):
        db.session.delete(stale)
    db.session.commit()