Example #1
def capgrids_tree():
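    """Build a quadtree of capgrids squares; return (keys, tree), where keys
    maps each integer id in the tree to its "map/Grid" label."""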
    tree = Quadtree((-180, -90, 180, 90))
    keys = {}
    i = 0
    for mapid in range(1, 100):
        mapid = str(mapid)
        for letter in 'abcdefghijklmnop':
            for num in range(1, 10):
                try:
                    b = box(mapid, letter + str(num))
                except IndexError:
                    continue
                v = "%s/%s" % (mapid, (letter + str(num)).capitalize())
                if v not in keys:
                    tree.add(i, b)
                    keys[i] = v
                    i += 1
    return keys, tree
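
A minimal usage sketch, assuming Quadtree is the quadtree package's index type and box comes from capgrids (the same names the later examples rely on): build the index once, then query it with a (minx, miny, maxx, maxy) box in degrees. Hits are the integer ids registered above and resolve to "map/Grid" labels through the keys dict.

cap_keys, cap_tree = capgrids_tree()
# Candidate grid squares overlapping a small box near Athens; the
# coordinates are hypothetical, for illustration only.
for hit in cap_tree.likely_intersection((23.5, 37.9, 23.8, 38.1)):
    print cap_keys[hit]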
Example #2
def main(context, gane_tree, period_map):
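    """Import GANE (TAVO Index) clusters into Pleiades as Places, Names,
    and Locations, publishing each created object."""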
    
    catalog = getToolByName(context, 'portal_catalog')
    repo = getToolByName(context, 'portal_repository')
    wftool = getToolByName(context, 'portal_workflow')
    utils = getToolByName(context, 'plone_utils')
    places = context['places']
    errata = context['errata']

    # Language adjustment: map three-letter ISO 639 codes (and a few
    # irregular TAVO codes) to the shorter tags used for name languages.
    lang_map = {
        'arbd': 'arbd',
        'ethp': 'amh', 
        'hbbb': 'hbo', 
        'nabt': 'qhy-nor', 
        'pmph': 'grc-pam',
        'scr': 'sh',
        'ave': 'ae',
        'bul': 'bg',
        'deu': 'de',
        'ell': 'el',
        'eng': 'en',
        'fas': 'fa',
        'fra': 'fr',
        'hin': 'hi',
        'hun': 'hu',
        'hye': 'hy',
        'ita': 'it',
        'kat': 'ka',
        'kur': 'ku',
        'lat': 'la',
        'pol': 'pl',
        'por': 'pt',
        'pus': 'ps',
        'ron': 'ro',
        'rus': 'ru',
        'san': 'sa',
        'snd': 'sd',
        'som': 'so',
        'spa': 'es',
        'sqi': 'sq',
        'swa': 'sw',
        'tur': 'tr',
        'urd': 'ur',
        'uzb': 'uz',
        'zho': 'zh'
        }

    import transaction

    for i, (pk, cluster) in enumerate(gane_tree.items()):
        
        # Because 'continue' is used below to skip through the loop, we need
        # to check at the top to see if it's time to batch commit.
        if i > 1 and (i-1) % 100 == 0:
            transaction.commit()
            LOG.info("Subtransaction committed at %s", i)

        savepoint = transaction.savepoint()
        try:
            
            if pk not in cluster:
                LOG.warn("Primary not found, skipping cluster, Pk: %s", pk)
                continue
            
            primary = cluster.pop(pk)
            pid = primary.get('pid')

            LOG.info("Importing cluster, i: %s, Pk: %s, Pid: %s, num items: %s", i, pk, pid, len(cluster))

            if pid and pid in places:
                # Handle links.
                place = places[pid]
                
                if hasattr(place, 'getRemoteUrl'):
                    url = place.getRemoteUrl().rstrip('/')
                    linked_pid = url.split('/')[-1]
                    LOG.info("Following link from %s to %s for Pk: %s", pid, linked_pid, pk)
                    pid = linked_pid
                    place = places[pid]

                action = 'append'
                place_citations = []
                
                LOG.info("Pid: %s, action: %s", pid, action)

            elif "gap.alexandriaarchive.org" in primary['placeURI']:
                gname = primary
                title = gname['title']
                description = "A place from the TAVO Index"
                text = "GANE OBJECT %s" % gname['GANEid']
                placeTypes = ['settlement']
                creators = gname['creators'].split(", ")
                contributors = gname['authors']
                contributors = contributors.replace("F. Deblauwe", "fdeblauwe")
                contributors = contributors.replace("E. Kansa", "ekansa")
                contributors = contributors.split(", ")

                pid = places.invokeFactory('Place',
                    places.generateId(prefix=''),
                    title=title,
                    placeType=placeTypes,
                    description=description,
                    text=text,
                    creators=creators,
                    contributors=contributors,
                    initialProvenance='TAVO Index'
                    )
                place = places[pid]
                action = 'create'
            
                place_citations = [dict(
                    identifier="http://www.worldcat.org/oclc/32624915",
                    range="TAVO Index (Vol. %s, p. %s)" % (
                        gname['reference']['index-volume'],
                        gname['reference']['index-page']),
                    type="cites")]

                for link in gname.get('externalURIs') or []:
                    if "wikipedia" in link['uri']:
                        label = 'Wikipedia "%s."' % link.get('title')
                    else:
                        label = link.get('title', "Untitled GANE Link")
                    place_citations.append(dict(
                        identifier=link['uri'],
                        range=label,
                        type="seeAlso",
                        ))
                
                field = place.getField('referenceCitations')
                field.resize(len(place_citations), place)
                place.setReferenceCitations(place_citations)
                place_citations = []

                now = DateTime(datetime.datetime.now().isoformat())
            
                place.setModificationDate(now)
                repo.save(place, MESSAGE)
            
                LOG.info("Created and archived Place, GANE id: %s, Pleiades id: %s", pk, pid)
            
                wftool.doActionFor(place, action='submit')
                LOG.info("Submitted Place, GANE id: %s, Pleiades id: %s", pk, pid)
            
                wftool.doActionFor(place, action='publish')
                LOG.info("Published Place, GANE id: %s, Pleiades id: %s", pk, pid)
            
            else:
                savepoint.rollback()
                LOG.error("No such place %s for %s", pid, pk)
                continue

            # New name
            for gid, gname, rating in [(pk, primary, 3)] + [
                    (k, v, 2) for k, v in cluster.items() ]:
                
                LOG.info("Naming, gid: %s, gname: %s, rating: %s", gid, gname, rating)

                for lang in (gname.get('title-languages') or
                             [{'iso': None}]):

                    if not gname.get('nameTransliterated'):
                        LOG.warn("No transliteration")

                    # Add a name to the place
                    title = gname['title']
                    description = (
                        "A place name from the TAVO Index (Vol. %s, p. %s)" 
                        % (gname['reference']['index-volume'],
                           gname['reference']['index-page']))
                    nameLanguage = lang_map.get(lang['iso'], lang['iso'])
                    nameTransliterated = u", ".join([title] + 
                        (gname.get('nameTransliterated') or []))
                    text = "GANE OBJECT %s" % gname['GANEid']
                    creators = gname['creators'].split(", ")
                    contributors = gname['authors']
                    contributors = contributors.replace(
                        "F. Deblauwe", "fdeblauwe")
                    contributors = contributors.replace(
                        "E. Kansa", "ekansa")
                    contributors = contributors.split(", ")
                    
                    # Determine a good id for the name
                    nid = utils.normalizeString(title)
                    if len(gname.get('title-languages', None) or []) > 1 and lang['iso']:
                        nid = nid + "-" + lang['iso'].lower()

                    # On an id collision, append or increment a numeric suffix.
                    while nid in place.contentIds():
                        match = re.search(r'-(\d+)$', nid)
                        if match:
                            num = int(match.group(1))
                            nid = re.sub(r'\d+$', str(num + 1), nid)
                        else:
                            nid = nid + "-1"
                    
                    nid = place.invokeFactory(
                        'Name',
                        nid,
                        title=title,
                        description=description,
                        text=text,
                        nameAttested=None,
                        nameLanguage=nameLanguage,
                        nameTransliterated=nameTransliterated,
                        nameType="geographic",
                        creators=creators,
                        contributors=contributors,
                        initialProvenance='TAVO Index')
                    ob = place[nid]

                    atts = [dict(
                        confidence='confident',
                        timePeriod=period_map[p]
                        ) for p in (gname.get('periods') or []) if p in period_map]
                        
                    field = ob.getField('attestations')
                    field.resize(len(atts), ob)
                    ob.setAttestations(atts)

                    citations = [dict(
                        identifier="http://www.worldcat.org/oclc/32624915",
                        range="TAVO Index (Vol. %s, p. %s)" % (
                            gname['reference']['index-volume'],
                            gname['reference']['index-page']),
                        type="cites")]

                    # Possible Wikipedia and other links
                    for link in gname.get('externalURIs') or []:
                    
                        if ("wikipedia" in link['uri'] and 
                            link['uri'] not in [c['identifier'] for c in place_citations]):
                                label = 'Wikipedia "%s."' % link.get('title')
                                place_citations.append(dict(
                                    identifier=link['uri'],
                                    range=label,
                                    type="seeAlso"))
                        else:
                            label = link.get('title', "Untitled GANE Link")
                            citations.append(dict(
                                identifier=link['uri'],
                                range=label,
                                type="seeAlso"))

                    field = ob.getField('referenceCitations')
                    field.resize(len(citations), ob)
                    ob.setReferenceCitations(citations)

                    now = DateTime(datetime.datetime.now().isoformat())
                    ob.setModificationDate(now)
                    repo.save(ob, MESSAGE)
                    rate(ob, "fdeblauwe", rating)
                    rate(ob, "ekansa", rating)

                    LOG.info("Created and archived Name, GANE id: %s, Pleiades id: %s", gid, pid)
            
                    wftool.doActionFor(ob, action='submit')
                    LOG.info("Submitted Name, GANE id: %s, Pleiades id: %s", gid, pid)
            
                    wftool.doActionFor(ob, action='publish')
                    LOG.info("Published Name, GANE id: %s, Pleiades id: %s", gid, pid)

                    field = place.getField('referenceCitations')
                    prev_citations = place.getReferenceCitations()
                    place_citations.extend(prev_citations)

                    # Dicts aren't hashable, so deduplicate via item tuples.
                    unique_items = set(tuple(c.items()) for c in place_citations)
                    place_citations = [dict(v) for v in unique_items]

                    field.resize(len(place_citations), place)
                    place.setReferenceCitations(place_citations)
                    LOG.info("Updated Place reference citations, GANE id: %s, Pleiades id: %s", gid, pid)

            # Locations

            LOG.info("Locating...")

            if filter(is_high_quality, place.getLocations()):
                # No need for GANE locations
                LOG.info("Place has high quality location(s), continuing...")
                continue

            # Let's take the most accurate coordinates and roll all the
            # periods into one Location.
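            # Accuracy codes run '0' (best) to '7'; entries lacking a code or
            # an extent are dropped, and the sort puts the most accurate first.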
            points = sorted(filter(
                lambda t: (t[0] or '8') in '01234567' and t[2].get('extent'),
                [(get_accuracy(v), k, v)
                 for k, v in [(pk, primary)] + cluster.items()]))
            if len(points) < 1:
                LOG.info("No accurate location found, continuing...")
                continue

            all_periods = set(
                chain(*[(n.get('periods') or []) for n in [primary] + cluster.values()]))

            accuracy, gid, gname = points[0]

            text = "GANE OBJECT %s\nMap: %s\nCorner Coordinates: %s\n" % (
                gname['GANEid'],
                (gname.get('main-map') or {}).get('map'),
                gname.get('extent', {'coordinates': None}).get('coordinates'))

            rating = 1
            if accuracy == '0':
                accuracy = '1'

            extent = gname.get('extent')
            if not extent:
                LOG.info("No extent found, continuing...")
                continue
                
            # find the capgrid containing this point
            bounds = None
            if extent['type'] == 'Point':
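                # Nudge the point and use it as a degenerate, zero-area bbox.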
                b = extent['coordinates']
                b[0] += 0.05
                b[1] += 0.05
                bounds = b + b
            elif extent['type'] == 'Polygon':
                # Fix busted GeoJSON if necessary
                coords = extent['coordinates']
                if isinstance(coords[0][0], (int, float)):
                    extent['coordinates'] = [coords]
                xs, ys = zip(*extent['coordinates'][0])
                bounds = min(xs), min(ys), max(xs), max(ys)

            hits = list(cap_tree.likely_intersection(bounds))

            placeTypes = ['settlement']

            lid = place.invokeFactory(
                'Location',
                'gane-location-%s' % gname['GANEid'],
                title="GANE Location %s" % gname['GANEid'],
                description="Approximate location from the TAVO index",
                text=text,
                featureType=placeTypes,
                creators=creators,
                contributors=contributors,
                initialProvenance='TAVO Index'
                )
            ob = place[lid]
            
            if hits:
                # Keep the smallest candidate square whose box contains the
                # midpoint of the bounds.
                area = 1000000.0  # sentinel larger than any grid square's area
                val = None
                for hit in hits:
                    mapgrid = cap_keys[hit]
                    mapnum, grid = mapgrid.split("/")
                    b = box(mapnum, grid)
                    hit_area = (b[2]-b[0])*(b[3]-b[1])
                    minx, miny, maxx, maxy = bounds
                    x = (minx + maxx)/2.0
                    y = (miny + maxy)/2.0
                    if b[0] <= x <= b[2] and b[1] <= y <= b[3] and hit_area < area:
                        area = hit_area
                        val = mapgrid
                if val:
                    LOG.info("Setting grid location of %s/%s: %s", pid, lid, val)
                    ob.setLocation('http://atlantides.org/capgrids/' + val)
                else:
                    LOG.warn("Grid location of %s/%s unset", pid, lid)
            
            if accuracy:
                mdid = "tavo-%s" % accuracy
                metadataDoc = context['features']['metadata'][mdid]
                ob.addReference(metadataDoc, 'location_accuracy')

            atts = [dict(
                confidence='confident', 
                timePeriod=period_map[p]
                ) for p in all_periods if p in period_map]
            field = ob.getField('attestations')
            field.resize(len(atts), ob)
            ob.setAttestations(atts)

            citations = [dict(
                identifier="http://www.worldcat.org/oclc/32624915",
                range="TAVO Index (Vol. %s, p. %s)" % (
                    gname['reference']['index-volume'],
                    gname['reference']['index-page']),
                type="cites")]

            # Possible Wikipedia and other links
            for link in gname.get('externalURIs') or []:
                if ("wikipedia" in link['uri'] and 
                    link['uri'] not in [c['identifier'] for c in place_citations]):
                    label = 'Wikipedia "%s."' % link.get('title')
                    place_citations.append(dict(
                        identifier=link['uri'],
                        range=label,
                        type="seeAlso"))
                else:
                    label = link.get('title', "Untitled GANE Link")
                    citations.append(dict(
                        identifier=link['uri'],
                        range=label,
                        type="seeAlso"))

            field = ob.getField('referenceCitations')
            field.resize(len(citations), ob)
            ob.setReferenceCitations(citations)

            now = DateTime(datetime.datetime.now().isoformat())
            ob.setModificationDate(now)
            repo.save(ob, MESSAGE)
            
            LOG.info("Created and archived Location, GANE id: %s, Pleiades id: %s", gid, pid)
            
            wftool.doActionFor(ob, action='submit')
            LOG.info("Submitted Location, GANE id: %s, Pleiades id: %s", gid, pid)
            
            wftool.doActionFor(ob, action='publish')
            LOG.info("Published Location, GANE id: %s, Pleiades id: %s", gid, pid)
            
            place.reindexObject()
        
        except Exception as e:
            savepoint.rollback()
            LOG.exception("Rolled back after catching exception: %s in %s",
                          e, pk)
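
The commit/savepoint scaffolding above is the reusable part of this loop. A minimal sketch of the same batching pattern, assuming Zope's transaction package; batched_import and the work() callable are hypothetical names:

import logging
import transaction

LOG = logging.getLogger(__name__)

def batched_import(items, work, batch_size=100):
    for i, item in enumerate(items):
        if i and i % batch_size == 0:
            transaction.commit()  # flush a completed batch
        savepoint = transaction.savepoint()
        try:
            work(item)
        except Exception:
            # Undo only the failed item; earlier items in the batch survive.
            savepoint.rollback()
            LOG.exception("Rolled back %r", item)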
Example #3
    def place(self, context, vocabs=True):
        """Create a graph centered on a Place and its Feature."""
        g = place_graph()

        purl = context.absolute_url()
        vh_root = context.REQUEST.environ.get('VH_ROOT')
        if vh_root:
            purl = purl.replace(vh_root, '')

        context_page = purl
        context_subj = URIRef(context_page)
        feature_subj = URIRef(context_page + "#this")

        # Type
        g.add((context_subj, RDF.type, PLEIADES['Place']))

        g.add((context_subj, RDF.type, SKOS['Concept']))
        g.add((context_subj, SKOS['inScheme'],
               URIRef("https://pleiades.stoa.org/places")))

        # Triples concerning the real world ancient place.
        g.add((feature_subj, RDF.type, SPATIAL['Feature']))

        # primary topic
        g.add((feature_subj, FOAF['primaryTopicOf'], context_subj))

        # title as rdfs:label
        g.add((feature_subj, RDFS['label'], Literal(context.Title())))

        # description as rdfs:comment
        g.add((feature_subj, RDFS['comment'], Literal(context.Description())))

        orig_url = context_page.replace('https://', 'http://')
        if orig_url and orig_url != context_page:
            g.add((context_subj, OWL['sameAs'], URIRef(orig_url)))

        g = self.dcterms(context, g)
        g = self.provenance(context, g, context_subj)

        # Place or feature types

        place_types = get_vocabulary('place_types')
        place_types = dict([(t['id'], t) for t in place_types])
        url = self.portal.absolute_url() + '/vocabularies/place-types'
        vh_root = context.REQUEST.environ.get('VH_ROOT')
        if vh_root:
            url = url.replace(vh_root, '')
        pcats = set(filter(None, context.getPlaceType()))
        for pcat in pcats:
            item = place_types.get(pcat)
            if not item:
                continue
            iurl = url + '/' + pcat
            g.add((context_subj, PLEIADES['hasFeatureType'], URIRef(iurl)))

            if vocabs:
                g = RegVocabGrapher(self.portal, self.request).concept(
                    'place-types', place_types[pcat], g)

        # Names as skos:label and prefLabel
        folder_path = "/".join(context.getPhysicalPath())
        brains = self.catalog(
            path={'query': folder_path, 'depth': 1},
            portal_type='Name',
            review_state='published')
        names = [b.getObject() for b in brains]

        for obj in names:
            name = Literal(
                obj.getNameAttested() or obj.getNameTransliterated(),
                obj.getNameLanguage() or None)
            g.add((context_subj, SKOS['altLabel'], name))

            name_subj = URIRef(context_page + "/" + obj.getId())
            g.add((context_subj, PLEIADES['hasName'], name_subj))
            g.add((name_subj, RDF.type, PLEIADES['Name']))

            orig_url = str(name_subj).replace('https://', 'http://')
            if orig_url and orig_url != str(name_subj):
                g.add((name_subj, OWL['sameAs'], URIRef(orig_url)))

            g = self.dcterms(obj, g)

            g = self.temporal(obj, g, name_subj, vocabs=vocabs)
            g = self.provenance(obj, g, name_subj)
            g = self.references(obj, g, name_subj)

            nameAttested = obj.getNameAttested()
            if nameAttested:
                g.add((name_subj, PLEIADES['nameAttested'],
                       Literal(nameAttested,
                               obj.getNameLanguage() or None)))

            for nr in obj.getNameTransliterated().split(','):
                nr = nr.strip()
                g.add((name_subj, PLEIADES['nameRomanized'], Literal(nr)))

        # representative point
        xs = []
        ys = []
        folder_path = "/".join(context.getPhysicalPath())
        brains = self.catalog(
            path={'query': folder_path, 'depth': 1},
            portal_type='Location',
            review_state='published')
        locs = [b.getObject() for b in brains]
        features = [wrap(ob, 0) for ob in locs]

        # get representative point
        loc_prec = location_precision(context)
        if loc_prec == 'precise':
            repr_point = None
            for f in features:
                if f.geometry and hasattr(f.geometry, '__geo_interface__'):
                    shape = asShape(f.geometry)
                    b = shape.bounds
                    xs.extend([b[0], b[2]])
                    ys.extend([b[1], b[3]])
                    if repr_point is None:
                        repr_point = shape.centroid
            if xs and ys:
                bbox = [min(xs), min(ys), max(xs), max(ys)]
            else:
                bbox = None

            if repr_point:
                g.add((context_subj, GEO['lat'], Literal(repr_point.y)))
                g.add((context_subj, GEO['long'], Literal(repr_point.x)))
            elif bbox:
                g.add((context_subj, GEO['lat'],
                       Literal((bbox[1] + bbox[3]) / 2.0)))
                g.add((context_subj, GEO['long'],
                       Literal((bbox[0] + bbox[2]) / 2.0)))
        elif loc_prec == 'rough':
            for loc in locs:
                ref = loc.getLocation()
                gridbase = "http://atlantides.org/capgrids/"
                if ref and ref.startswith(gridbase):
                    try:
                        params = ref.rstrip("/")[len(gridbase):].split("/")
                        if len(params) == 1:
                            mapnum = params[0]
                            grids = [None]
                        elif len(params) == 2:
                            mapnum = params[0]
                            grids = [v.upper() for v in params[1].split("+")]
                        else:
                            log.error("Invalid location identifier %s" % ref)
                            continue
                        for grid in grids:
                            grid_uri = gridbase + mapnum + "#" + (grid or "this")
                            g.add((context_subj, OSSPATIAL['within'],
                                   URIRef(grid_uri)))

                            e = URIRef(grid_uri + "-extent")  # the grid's extent
                            g.add((e, RDF.type, OSGEO['AbstractGeometry']))
                            g.add((URIRef(grid_uri), OSGEO['extent'], e))
                            bounds = capgrids.box(mapnum, grid)
                            shape = box(*bounds)
                            g.add((e, OSGEO['asGeoJSON'],
                                   Literal(geojson.dumps(shape))))
                            g.add(
                                (e, OSGEO['asWKT'], Literal(wkt.dumps(shape))))
                    except (ValueError, TypeError):
                        log.exception(
                            "Exception caught computing grid extent for %r",
                            loc)

        # Locations
        for obj in locs:

            locn_subj = URIRef(context_page + "/" + obj.getId())
            g.add((context_subj, PLEIADES['hasLocation'], locn_subj))
            g.add((locn_subj, RDF.type, PLEIADES['Location']))

            g = self.dcterms(obj, g)

            g = self.temporal(obj, g, locn_subj, vocabs=vocabs)
            g = self.provenance(obj, g, locn_subj)
            g = self.references(obj, g, locn_subj)

            orig_url = str(locn_subj).replace('https://', 'http://')
            if orig_url and orig_url != str(locn_subj):
                g.add((locn_subj, OWL['sameAs'], URIRef(orig_url)))

            dc_locn = obj.getLocation()
            gridbase = "http://atlantides.org/capgrids/"

            if dc_locn and dc_locn.startswith(gridbase):
                try:
                    params = dc_locn.rstrip("/")[len(gridbase):].split("/")
                    if len(params) == 1:
                        mapnum = params[0]
                        grids = [None]
                    elif len(params) == 2:
                        mapnum = params[0]
                        grids = [v.upper() for v in params[1].split("+")]
                    elif len(params) >= 3:
                        mapnum = params[0]
                        grids = [v.upper() for v in params[1:]]
                    else:
                        log.error("Invalid location identifier %s" % ref)
                        continue

                    for grid in grids:
                        grid_uri = gridbase + mapnum + "#" + (grid or "this")
                        bounds = capgrids.box(mapnum, grid)
                        shape = box(*bounds)

                        g.add((locn_subj, OSSPATIAL['partiallyOverlaps'],
                               URIRef(grid_uri)))

                        e = URIRef(grid_uri + "-extent")  # the grid's extent
                        g.add((e, RDF.type, OSGEO['AbstractGeometry']))
                        g.add((URIRef(grid_uri), OSGEO['extent'], e))
                        g.add((e, OSGEO['asGeoJSON'],
                               Literal(geojson.dumps(shape))))
                        g.add((e, OSGEO['asWKT'], Literal(wkt.dumps(shape))))

                except Exception:
                    log.exception(
                        "Exception caught computing grid extent for %r", obj)

            else:
                try:
                    f = wrap(obj, 0)
                    if (f.geometry
                            and hasattr(f.geometry, '__geo_interface__')):
                        shape = asShape(f.geometry)
                        g.add((locn_subj, OSGEO['asGeoJSON'],
                               Literal(geojson.dumps(shape))))
                        g.add((locn_subj, OSGEO['asWKT'],
                               Literal(wkt.dumps(shape))))
                except Exception:
                    log.exception("Couldn't wrap and graph %r", obj)

        # connects with
        for f in context.getConnectedPlaces():
            if self.wftool.getInfoFor(f, 'review_state') != 'published':
                continue
            furl = f.absolute_url()
            vh_root = context.REQUEST.environ.get('VH_ROOT')
            if vh_root:
                furl = furl.replace(vh_root, '')
            feature_obj = URIRef(furl + "#this")
            g.add((feature_subj, SPATIAL['C'], feature_obj))
            g.add((context_subj, RDFS['seeAlso'], URIRef(furl)))

        # dcterms:coverage
        coverage = geoContext(context)
        if coverage:
            g.add((context_subj, DCTERMS['coverage'], Literal(coverage)))

        g = self.references(context, g, context_subj)

        return g
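
This method and the Example #4 variant below each inline the same parsing of capgrids location identifiers twice. A sketch of that parsing factored into one helper, following the more permissive branch used in the Locations loops (identifiers look like http://atlantides.org/capgrids/<mapnum>, optionally followed by /<grid> or /<grid>+<grid>); parse_capgrid_ref is a hypothetical name:

GRIDBASE = "http://atlantides.org/capgrids/"

def parse_capgrid_ref(ref):
    """Return (mapnum, grids) for a capgrids identifier, or None if invalid."""
    params = ref.rstrip("/")[len(GRIDBASE):].split("/")
    if not params[0]:
        return None
    if len(params) == 1:
        return params[0], [None]
    if len(params) == 2:
        # Plus-separated grid squares share one map number.
        return params[0], [v.upper() for v in params[1].split("+")]
    return params[0], [v.upper() for v in params[1:]]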
Example #4
    def place(self, context, vocabs=True):
        """Create a graph centered on a Place and its Feature."""
        g = place_graph()
        
        purl = context.absolute_url()
        vh_root = context.REQUEST.environ.get('VH_ROOT')
        if vh_root:
            purl = purl.replace(vh_root, '')
        
        context_page = purl
        context_subj = URIRef(context_page)
        feature_subj = URIRef(context_page + "#this")
        
        # Type
        g.add((context_subj, RDF.type, PLEIADES['Place']))
        
        g.add((context_subj, RDF.type, SKOS['Concept']))
        g.add((
            context_subj, 
            SKOS['inScheme'], 
            URIRef("http://pleiades.stoa.org/places")))
        
        # Triples concerning the real world ancient place.
        g.add((feature_subj, RDF.type, SPATIAL['Feature']))

        # primary topic
        g.add((
            feature_subj,
            FOAF['primaryTopicOf'],
            context_subj))

        # title as rdfs:label
        g.add((
            feature_subj,
            RDFS['label'], 
            Literal(context.Title())))

        # description as rdfs:comment
        g.add((
            feature_subj,
            RDFS['comment'], 
            Literal(context.Description())))

        g = self.dcterms(context, g)
        g = self.provenance(context, g, context_subj)

        # Place or feature types

        place_types = self.vocabs['place-types']
        pcats = set(filter(None, context.getPlaceType()))
        for pcat in pcats:
            item = place_types.get(pcat)
            if not item:
                continue
            iurl = item.absolute_url()
            vh_root = item.REQUEST.environ.get('VH_ROOT')
            if vh_root:
                iurl = iurl.replace(vh_root, '')
            g.add((
                context_subj,
                PLEIADES['hasFeatureType'],
                URIRef(iurl)))

            if vocabs:
                g = VocabGrapher(place_types, self.request).concept(
                    place_types[pcat], g)

        # Names as skos:label and prefLabel
        folder_path = "/".join(context.getPhysicalPath())
        brains = self.catalog(
            path={'query': folder_path, 'depth': 1}, 
            portal_type='Name', 
            review_state='published')
        objs = [b.getObject() for b in brains]
        name_ratings = [
            self.catalog.getIndexDataForRID(
                b.getRID())['average_rating'] for b in brains]
        rated_names = sorted(
            zip(name_ratings, objs),
            reverse=True)
        
        for rating, obj in rated_names[:1]:
            name = Literal(
                obj.getNameAttested() or obj.getNameTransliterated(),
                obj.getNameLanguage() or None)
            if rating and rating[0] > 0.0:
                g.add((
                    context_subj,
                    SKOS['prefLabel'],
                    name))
            else:
                g.add((
                    context_subj,
                    SKOS['altLabel'],
                    name))
        
        for rating, obj in rated_names[1:]:
            name = Literal(
                obj.getNameAttested() or obj.getNameTransliterated(),
                obj.getNameLanguage() or None)
            g.add((
                context_subj,
                SKOS['altLabel'], 
                name))
        
        # Names
        for rating, obj in rated_names:
            
            name_subj = URIRef(context_page + "/" + obj.getId())
            g.add((context_subj, PLEIADES['hasName'], name_subj))
            g.add((name_subj, RDF.type, PLEIADES['Name']))
            
            g = self.dcterms(obj, g)
            
            g = self.temporal(obj, g, name_subj, vocabs=vocabs)
            g = self.provenance(obj, g, name_subj)
            g = self.references(obj, g, name_subj)

            nameAttested = obj.getNameAttested()
            if nameAttested:
                g.add((
                    name_subj, 
                    PLEIADES['nameAttested'], 
                    Literal(nameAttested, obj.getNameLanguage() or None)))

            for nr in obj.getNameTransliterated().split(','):
                nr = nr.strip()
                g.add((name_subj, PLEIADES['nameRomanized'], Literal(nr)))

        # representative point
        xs = []
        ys = []
        folder_path = "/".join(context.getPhysicalPath())
        brains = self.catalog(
            path={'query': folder_path, 'depth': 1}, 
            portal_type='Location', 
            review_state='published')
        locs = [b.getObject() for b in brains]
        location_ratings = [
            self.catalog.getIndexDataForRID(
                b.getRID())['average_rating'] for b in brains]
        features = [wrap(ob, 0) for ob in locs]

        # get representative point
        loc_prec = location_precision(context)
        if loc_prec == 'precise':
            repr_point = None
            for r, f in sorted(zip(location_ratings, features), reverse=True):
                if f.geometry and hasattr(f.geometry, '__geo_interface__'):
                    shape = asShape(f.geometry)
                    b = shape.bounds
                    xs.extend([b[0], b[2]])
                    ys.extend([b[1], b[3]])
                    if repr_point is None and r and r[0] > 0.0:
                        repr_point = shape.centroid
            if xs and ys:
                bbox = [min(xs), min(ys), max(xs), max(ys)]
            else:
                bbox = None
        
            if repr_point:
                g.add((
                    context_subj,
                    GEO['lat'],
                    Literal(repr_point.y)))
                g.add((
                    context_subj,
                    GEO['long'],
                    Literal(repr_point.x)))
            elif bbox:
                g.add((
                    context_subj,
                    GEO['lat'],
                    Literal((bbox[1]+bbox[3])/2.0)))
                g.add((
                    context_subj,
                    GEO['long'],
                    Literal((bbox[0]+bbox[2])/2.0)))
        elif loc_prec == 'rough':
            for loc in locs:
                ref = loc.getLocation()
                gridbase = "http://atlantides.org/capgrids/"
                if ref and ref.startswith(gridbase):
                    params = ref.rstrip("/")[len(gridbase):].split("/")
                    if len(params) == 1:
                        mapnum = params[0]
                        grids = [None]
                    elif len(params) == 2:
                        mapnum = params[0]
                        grids = [v.upper() for v in params[1].split("+")]
                    else:
                        log.error("Invalid location identifier %s" % ref)
                        continue
                    for grid in grids:
                        grid_uri = gridbase + mapnum + "#" + (grid or "this")
                        bounds = capgrids.box(mapnum, grid)
                        shape = box(*bounds)

                        g.add((
                            context_subj,
                            OSSPATIAL['within'],
                            URIRef(grid_uri)))

                        e = URIRef(grid_uri + "-extent") # the grid's extent
                        g.add((e, RDF.type, OSGEO['AbstractGeometry']))
                        g.add((
                            URIRef(grid_uri),
                            OSGEO['extent'],
                            e))
                        g.add((
                            e,
                            OSGEO['asGeoJSON'],
                            Literal(geojson.dumps(shape))))
                        g.add((
                            e,
                            OSGEO['asWKT'],
                            Literal(wkt.dumps(shape))))

        # Locations
        for obj in locs:
            
            locn_subj = URIRef(context_page + "/" + obj.getId())
            g.add((context_subj, PLEIADES['hasLocation'], locn_subj))
            g.add((locn_subj, RDF.type, PLEIADES['Location']))
            
            g = self.dcterms(obj, g)

            g = self.temporal(obj, g, locn_subj, vocabs=vocabs)
            g = self.provenance(obj, g, locn_subj)
            g = self.references(obj, g, locn_subj)

            dc_locn = obj.getLocation()
            gridbase = "http://atlantides.org/capgrids/"

            if dc_locn and dc_locn.startswith(gridbase):
                try:
                    params = dc_locn.rstrip("/")[len(gridbase):].split("/")
                    if len(params) == 1:
                        mapnum = params[0]
                        grids = [None]
                    elif len(params) == 2:
                        mapnum = params[0]
                        grids = [v.upper() for v in params[1].split("+")]
                    elif len(params) >= 3:
                        mapnum = params[0]
                        grids = [v.upper() for v in params[1:]]
                    else:
                        log.error("Invalid location identifier %s" % ref)
                        continue

                    for grid in grids:
                        grid_uri = gridbase + mapnum + "#" + (grid or "this")
                        bounds = capgrids.box(mapnum, grid)
                        shape = box(*bounds)

                        g.add((
                            locn_subj,
                            OSSPATIAL['partiallyOverlaps'],
                            URIRef(grid_uri)))

                        e = URIRef(grid_uri + "-extent") # the grid's extent
                        g.add((e, RDF.type, OSGEO['AbstractGeometry']))
                        g.add((
                            URIRef(grid_uri),
                            OSGEO['extent'],
                            e))
                        g.add((
                            e,
                            OSGEO['asGeoJSON'],
                            Literal(geojson.dumps(shape))))
                        g.add((
                            e,
                            OSGEO['asWKT'],
                            Literal(wkt.dumps(shape))))

                except Exception:
                    log.exception("Exception caught computing grid extent for %r", obj)

            else:
                try:
                    f = wrap(obj, 0)
                    if (f.geometry and 
                            hasattr(f.geometry, '__geo_interface__')):
                        shape = asShape(f.geometry)
                        g.add((
                            locn_subj,
                            OSGEO['asGeoJSON'],
                            Literal(geojson.dumps(shape))))
                        g.add((
                            locn_subj,
                            OSGEO['asWKT'],
                            Literal(wkt.dumps(shape))))
                except Exception:
                    log.exception("Couldn't wrap and graph %r", obj)

        # connects with
        for f in (context.getConnections() + 
                context.getConnections_from()):
            if self.wftool.getInfoFor(f, 'review_state') != 'published':
                continue
            furl = f.absolute_url()
            vh_root = context.REQUEST.environ.get('VH_ROOT')
            if vh_root:
                furl = furl.replace(vh_root, '')
            feature_obj = URIRef(furl + "#this")
            g.add((feature_subj, SPATIAL['C'], feature_obj))
            g.add((context_subj, RDFS['seeAlso'], URIRef(furl)))

        # dcterms:coverage
        coverage = geoContext(context)
        if coverage:
            g.add((
                context_subj,
                DCTERMS['coverage'],
                Literal(coverage)))

        g = self.references(context, g, context_subj)

        return g
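
The "precise" branch above reduces to one small computation: the centroid of the best positively rated geometry becomes the representative point, with the midpoint of the combined bounding box as the fallback. A sketch under the same assumptions (shapely's asShape, features wrapped as above, (rating, feature) pairs sorted best first); representative_point is a hypothetical helper:

from shapely.geometry import asShape

def representative_point(rated_features):
    """rated_features: (rating, feature) pairs, best-rated first."""
    xs, ys = [], []
    centroid = None
    for rating, f in rated_features:
        if f.geometry and hasattr(f.geometry, '__geo_interface__'):
            shape = asShape(f.geometry)
            minx, miny, maxx, maxy = shape.bounds
            xs.extend([minx, maxx])
            ys.extend([miny, maxy])
            # Only a positively rated geometry may supply the centroid.
            if centroid is None and rating and rating[0] > 0.0:
                centroid = shape.centroid
    if centroid is not None:
        return centroid.x, centroid.y
    if xs and ys:
        # Fall back to the midpoint of the combined bounding box.
        return (min(xs) + max(xs)) / 2.0, (min(ys) + max(ys)) / 2.0
    return None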