Example #1
    def get(self, uuid):
        # An optional ?version= query parameter selects a specific revision.
        args = versioned_parser.parse_args()
        version = args.get('version')
        bag = database.get_bag(uuid, version=version)

        if not bag:
            abort(404)

        # Weak ETag derived from the bag's identity and revision number.
        bag_etag = 'bag-{}-version-{}'.format(uuid, bag['version'])
        if request.if_none_match.contains_weak(bag_etag):
            return None, 304

        headers = {'Last-Modified': format_date_time(bag['created_at'])}

        data = json.loads(bag['data'])
        defs, defs_ctx = database.get_periods_and_context(data['items'])

        data['@id'] = identifier.prefix('bags/%s' % uuid)
        data['creator'] = bag['created_by']
        data['items'] = defs

        response = api.make_response(data, 200, headers)
        response.set_etag(bag_etag, weak=True)

        # A pinned version is immutable and safe to cache for a long time;
        # the latest view may change, so it gets no cache lifetime.
        if version is None:
            return cache.no_time(response)
        else:
            return cache.long_time(response)
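
A quick sketch of the conditional-request flow this enables, assuming a hypothetical Flask test client `client` for an app that serves this resource at `/bags/<uuid>` (the route and `some_uuid` are assumptions, not shown above):

resp = client.get('/bags/{}'.format(some_uuid))
etag = resp.headers['ETag']  # a weak ETag, e.g. W/"bag-<uuid>-version-0"

# Replaying the request with If-None-Match lets the handler answer 304
# with an empty body instead of rebuilding the response.
resp = client.get('/bags/{}'.format(some_uuid),
                  headers={'If-None-Match': etag})
assert resp.status_code == 304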
Example #2
def create_or_update_bag(uuid, creator_id, data):
    db = get_db()
    c = db.cursor()
    # Versions are append-only: the next version is MAX(version) + 1,
    # or 0 if this uuid has never been seen before.
    c.execute('''
    SELECT MAX(version) AS max_version
    FROM bag
    WHERE uuid = ?''', (uuid.hex,))
    row = c.fetchone()
    version = 0 if row['max_version'] is None else row['max_version'] + 1
    if version > 0:
        # Record provenance by linking back to the previous version.
        data['wasRevisionOf'] = identifier.prefix('bags/{}?version={}'.format(
            uuid, row['max_version']))
    c.execute('''
    INSERT INTO bag (
               uuid,
               version,
               created_by,
               data,
               owners)
    VALUES (?, ?, ?, ?, ?)''',
              (uuid.hex,
               version,
               creator_id,
               json.dumps(data),
               json.dumps([creator_id])))
    c.close()
    db.commit()
    return version
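
A short usage sketch of the versioning behavior, assuming the same `get_db` and `identifier` as above (the creator id is made up):

import uuid as uuid_lib

bag_id = uuid_lib.uuid4()
assert create_or_update_bag(bag_id, 'some-creator', {'items': []}) == 0
# The second call bumps the version and adds a 'wasRevisionOf' link
# pointing at bags/<uuid>?version=0.
assert create_or_update_bag(bag_id, 'some-creator', {'items': []}) == 1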
Example #3
def get_item(extract_item, id, version=None):
    # Load the requested dataset snapshot and decode its JSON payload.
    dataset = get_dataset(version=version)
    o = json.loads(dataset['data'])
    item = extract_item(identifier.prefix(id), o, raiseErrors=True)
    item['@context'] = o['@context']
    if version is not None:
        # Mark the context so consumers can tell a pinned version was served.
        item['@context']['__version'] = version

    return item
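
`extract_item` is supplied by the caller. Purely as an assumption about its shape, it is a callable that takes the prefixed id and the decoded dataset, and raises when `raiseErrors` is set and the id is missing:

# Hypothetical extractor matching the calling convention above.
def extract_definition(prefixed_id, dataset, raiseErrors=False):
    for collection in dataset.get('periodCollections', {}).values():
        if prefixed_id in collection['definitions']:
            return dict(collection['definitions'][prefixed_id])
    if raiseErrors:
        raise KeyError(prefixed_id)

item = get_item(extract_definition, 'abcd1234')  # id is illustrative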
Example #4
def redirect_to_last_update(entity_id, version):
    if version is None:
        return None
    # Find the dataset version in which this entity last changed.
    v = database.find_version_of_last_update(
        identifier.prefix(entity_id), version)
    if v is None:
        abort(404)
    if v == int(version):
        return None
    # Canonicalize: permanently redirect to the last-update version.
    return redirect(request.path + '?version={}'.format(v), code=301)
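
The helper has three outcomes, sketched here with a made-up id:

# No ?version= requested: nothing to canonicalize.
assert redirect_to_last_update('abcd1234', None) is None

# Requesting the version of the entity's last update: returns None,
# so the caller serves the entity in place.
# Requesting any other known version: 301 redirect to ?version=<last update>.
# Unknown entity or version: abort(404).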
Example #5
def attach_to_dataset(o):
    if len(o) > 0:
        if app.config['CANONICAL']:
            # Canonical deployment: use persistent, prefixed identifiers.
            o['primaryTopicOf'] = {
                'id': identifier.prefix(request.full_path[1:]),
                'inDataset': {
                    'id': identifier.prefix('d'),
                    'changes': identifier.prefix('h#changes')
                }
            }
        else:
            # Mirror or development deployment: fall back to this server's URLs.
            o['primaryTopicOf'] = {
                'id': request.url,
                'inDataset': {
                    'id': url_for('abstract_dataset', _external=True),
                    'changes': url_for('history', _external=True) + '#changes'
                }
            }
    return o
Example #6
    def get(self, definition_id):
        version = request.args.get('version')
        # Canonicalize versioned requests before doing any work.
        new_location = redirect_to_last_update(definition_id, version)
        if new_location is not None:
            return new_location
        dataset = database.get_dataset(version=version)
        o = json.loads(dataset['data'])
        if 'periodCollections' not in o:
            abort(404)
        definition_key = identifier.prefix(definition_id)
        # A definition id begins with its collection's id (first five characters).
        collection_key = identifier.prefix(definition_id[:5])
        if collection_key not in o['periodCollections']:
            abort_gone_or_not_found(collection_key)
        collection = o['periodCollections'][collection_key]

        if definition_key not in collection['definitions']:
            abort_gone_or_not_found(definition_key)
        definition = collection['definitions'][definition_key]
        definition['collection'] = collection_key
        definition['@context'] = o['@context']
        return attach_to_dataset(definition)
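
`abort_gone_or_not_found` is not defined in these examples. A plausible reading, stated purely as an assumption, is that it answers 410 Gone for keys that once existed and 404 otherwise:

# Hypothetical sketch; database.key_was_deleted is assumed, not shown above.
def abort_gone_or_not_found(key):
    if database.key_was_deleted(key):
        abort(410)
    else:
        abort(404)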
Example #7
def create_or_update_graph(id, data):
    db = get_db()
    c = db.cursor()
    # Same append-only versioning scheme as create_or_update_bag above.
    c.execute('''
    SELECT MAX(version) AS max_version
    FROM graph
    WHERE id = ?''', (id,))
    row = c.fetchone()
    version = 0 if row['max_version'] is None else row['max_version'] + 1
    if version > 0:
        data['wasRevisionOf'] = identifier.prefix(
            'graphs/{}?version={}'.format(id, row['max_version']))
    c.execute('''
    INSERT INTO graph (
               id,
               version,
               data)
    VALUES (?, ?, ?)''',
              (id,
               version,
               json.dumps(data)))
    c.close()
    db.commit()
    return version
Example #8
def make_nanopub(period_id, version):
    cursor = database.get_db().cursor()

    # Find the Nth patch (1-based `version`) that created or updated this
    # period, along with the dataset version that the patch resulted in.
    cursor.execute(
        '''
        SELECT
            patch.id as patch_id,

            patch.merged_at,
            patch.merged_by,
            patch.created_by,

            dataset.data
        FROM patch_request AS patch
        LEFT JOIN dataset ON patch.resulted_in = dataset.id
        WHERE
            patch.created_entities LIKE ?
            OR
            patch.updated_entities LIKE ?
        ORDER BY patch.id ASC
        LIMIT ?, 1;
        ''',
        ('%"' + identifier.prefix(period_id) + '"%',
         '%"' + identifier.prefix(period_id) + '"%',
         version - 1)
    )

    result = cursor.fetchone()

    if not result:
        raise PeriodNotFoundError(
            'Could not find version {} of period {}'.format(
                version, period_id))

    data = json.loads(result['data'])

    # The authority id is the leading segment of the period id.
    authority_id = identifier.prefix(
        period_id[:identifier.AUTHORITY_SEQUENCE_LENGTH + 1])
    authority = data['authorities'][authority_id]
    source = authority['source']
    period = authority['periods'][identifier.prefix(period_id)]
    period['authority'] = authority_id

    nanopub_uri = '{}/nanopub{}'.format(
        identifier.prefix(period_id), version)
    patch_uri = identifier.prefix('h#change-{}'.format(result['patch_id']))

    # Extend the dataset's JSON-LD context with nanopub-specific prefixes.
    context = data['@context'].copy()
    context['np'] = 'http://nanopub.org/nschema#'
    context['pub'] = data['@context']['@base'] + nanopub_uri + '#'
    context['prov'] = 'http://www.w3.org/ns/prov#'

    # TODO: Pop "source" from period and include it in the provenance
    # graph?

    # A nanopublication is four named graphs: head, assertion, provenance,
    # and publication info.
    return {
        "@context": context,
        "@graph": [
            {
                "@id": "pub:head",
                "@graph": {
                    "@id": nanopub_uri,
                    "@type": "np:Nanopublication",
                    "np:hasAssertion": as_uri("pub:assertion"),
                    "np:hasProvenance": as_uri("pub:provenance"),
                    "np:hasPublicationInfo": as_uri("pub:pubinfo"),
                }
            },
            {
                "@id": "pub:assertion",
                "@graph": [period]
            },
            {
                "@id": "pub:provenance",
                "@graph": [
                    {
                        "@id": 'pub:assertion',
                        "dc:source": source
                    }
                ]
            },
            {
                "@id": "pub:pubinfo",
                "@graph": [
                    {
                        "@id": nanopub_uri,
                        "prov:wasGeneratedBy": as_uri(patch_uri),
                        "prov:asGeneratedAtTime": result['merged_at'],
                        "prov:wasAttributedTo": [
                            as_uri(result['merged_by']),
                            as_uri(result['created_by'])
                        ]
                    }
                ]
            }
        ]
    }
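
`as_uri` is used but not defined in the snippet. In JSON-LD terms it presumably wraps a string so that it is emitted as an IRI rather than a plain literal; a minimal sketch under that assumption:

# Assumed helper: force a JSON-LD value to be an IRI, not a string literal.
def as_uri(value):
    return {'@id': value}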
Example #9
def attach_to_dataset(o):
    # Simpler variant: always attach canonical, prefixed identifiers.
    o['primaryTopicOf'] = {'id': identifier.prefix(request.path[1:]),
                           'inDataset': identifier.prefix('d')}
    return o
Example #10
def history():
    # Build a PROV change log of every merged patch as an RDF graph and
    # serialize it to JSON-LD.
    g = Graph()
    changelog = Collection(g, URIRef("#changelog"))
    cursor = database.get_db().cursor()
    for row in cursor.execute(
        """
SELECT
  id,
  created_at,
  created_by,
  updated_by,
  merged_at,
  merged_by,
  applied_to,
  resulted_in,
  created_entities,
  updated_entities,
  removed_entities
FROM patch_request
WHERE merged = 1
ORDER BY id ASC
"""
    ).fetchall():
        change = URIRef("#change-{}".format(row["id"]))
        patch = URIRef("#patch-{}".format(row["id"]))
        g.add((patch, FOAF.page,
               PERIODO[identifier.prefix(url_for("patch", id=row["id"]))]))
        g.add((change, PROV.startedAtTime,
               Literal(utils.isoformat(row["created_at"]),
                       datatype=XSD.dateTime)))
        g.add((change, PROV.endedAtTime,
               Literal(utils.isoformat(row["merged_at"]),
                       datatype=XSD.dateTime)))
        # Each change takes one dataset version to the next; both versions
        # are specializations of the abstract dataset.
        dataset = PERIODO[identifier.prefix(url_for("abstract_dataset"))]
        version_in = PERIODO[identifier.prefix(
            url_for("abstract_dataset", version=row["applied_to"]))]
        g.add((version_in, PROV.specializationOf, dataset))
        version_out = PERIODO[identifier.prefix(
            url_for("abstract_dataset", version=row["resulted_in"]))]
        g.add((version_out, PROV.specializationOf, dataset))

        g.add((change, PROV.used, version_in))
        g.add((change, PROV.used, patch))
        g.add((change, PROV.generated, version_out))

        def add_entity_version(entity_id):
            # Each touched entity gets a version-pinned specialization.
            entity = PERIODO[entity_id]
            entity_version = PERIODO[
                entity_id + "?version={}".format(row["resulted_in"])]
            g.add((entity_version, PROV.specializationOf, entity))
            g.add((change, PROV.generated, entity_version))
            return entity_version

        for entity_id in json.loads(row["created_entities"]):
            add_entity_version(entity_id)

        for entity_id in json.loads(row["updated_entities"]):
            entity_version = add_entity_version(entity_id)
            prev_entity_version = PERIODO[entity_id + "?version={}".format(row["applied_to"])]
            g.add((entity_version, PROV.wasRevisionOf, prev_entity_version))

        for entity_id in json.loads(row["removed_entities"]):
            g.add((change, PROV.invalidated, PERIODO[entity_id]))

        # Qualified associations record who submitted, updated, and merged
        # the patch; the initial data load has no external agent.
        for field, term in (("created_by", "submitted"),
                            ("updated_by", "updated"),
                            ("merged_by", "merged")):
            if row[field] == "initial-data-loader":
                continue
            agent = URIRef(row[field])
            association = URIRef("#patch-{}-{}".format(row["id"], term))
            g.add((change, PROV.wasAssociatedWith, agent))
            g.add((change, PROV.qualifiedAssociation, association))
            g.add((association, PROV.agent, agent))
            g.add((association, PROV.hadRole,
                   PERIODO[identifier.prefix(url_for("vocab") + "#" + term)]))

        changelog.append(change)

    def ordering(o):
        if o["@id"] == "#changelog":
            # sort first
            return " "
        return o["@id"]

    # rdflib versions before 6 return bytes from serialize(), hence decode().
    jsonld = json.loads(
        g.serialize(format="json-ld", context=CONTEXT).decode("utf-8"))
    jsonld["history"] = sorted(jsonld["history"], key=ordering)
    return json.dumps(jsonld, sort_keys=True)
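
This snippet depends on names defined elsewhere in the module. A sketch of plausible definitions, assuming rdflib and an ARK-style resolver base for PERIODO (the CONTEXT value here is only a stand-in, not the project's real JSON-LD context):

from rdflib import Graph, Literal, Namespace, URIRef
from rdflib.collection import Collection
from rdflib.namespace import FOAF, XSD

PROV = Namespace('http://www.w3.org/ns/prov#')
PERIODO = Namespace('http://n2t.net/ark:/99152/')  # assumed resolver base
CONTEXT = {'prov': str(PROV), 'foaf': str(FOAF)}   # stand-in JSON-LD context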