def get_georeference(self):
    """Best-effort copy of the context's coordinates into self['coordinates'].

    Does nothing when the context is not georeferenceable, not adaptable
    to IWriteGeoreferenced, or has no usable coordinates.
    """
    if IGeoreferenceable.providedBy(self.context):
        try:
            # adaptation check: raises if the context cannot be georeferenced
            IWriteGeoreferenced(self.context)
            coord = Coordinates(self.context).coordinates
            self['coordinates'] = coord
        except Exception:
            # Deliberately best-effort: a context without coordinates is
            # not an error here. Narrowed from a bare `except:` which also
            # swallowed SystemExit/KeyboardInterrupt.
            pass
def setUp(self):
    """Create a single georeferenced Document ('doc') for the tests."""
    portal = self.layer['portal']
    self.portal = portal
    setRoles(portal, TEST_USER_ID, ['Manager'])
    portal.invokeFactory("Document", "doc")
    doc = portal.doc
    self.doc = doc
    # mark the document as georeferenceable, then give it a point geometry
    alsoProvides(doc, IGeoreferenceable)
    geo = IWriteGeoreferenced(doc)
    self.geo = geo
    geo.setGeoInterface('Point', (5.583, 50.633))
    doc.reindexObject(idxs=['zgeo_geometry', 'collective_geo_styles'])
def test_bounds_widget_adapter(self):
    """WidgetFilterBrains keeps only brains whose point lies inside the
    bounding box given by the form's north/east/south/west values."""
    form = {
        'north': '51.00000',
        'east': '6.000',
        'south': '49.000',
        'west': '5.000'
    }
    widget = WidgetFilterBrains(self.folder)
    # document and event have ICoordinates behavior
    event = api.content.create(container=self.portal, id='event', type='Event')
    doc = api.content.create(container=self.portal, id='doc', type='Document')
    geo_doc = IWriteGeoreferenced(doc)
    geo_event = IWriteGeoreferenced(event)
    geo_doc.setGeoInterface('Point', (5.583, 50.633))
    geo_event.setGeoInterface('Point', (5.73, 50.933))
    reindex_idxs = ['zgeo_geometry', 'collective_geo_styles']
    doc.reindexObject(idxs=reindex_idxs)
    event.reindexObject(idxs=reindex_idxs)

    portal_catalog = api.portal.get_tool('portal_catalog')
    query = {'portal_type': ['Event', 'Document']}

    def filtered_count():
        # Re-run the catalog query and count the brains the widget keeps.
        brains = portal_catalog(query)
        return len(list(widget(brains, form)))

    # both points are inside the bounds
    self.assertEqual(filtered_count(), 2)

    # move the event east of the bounds: only the document remains
    geo_event.setGeoInterface('Point', (6.73, 50.933))
    event.reindexObject(idxs=reindex_idxs)
    self.assertEqual(filtered_count(), 1)

    # move the document north of the bounds: nothing remains
    geo_doc.setGeoInterface('Point', (5.583, 52.633))
    doc.reindexObject(idxs=reindex_idxs)
    self.assertEqual(filtered_count(), 0)
def coordinates(self, value):
    """Set the context's geometry from a WKT string.

    Parses ``value`` with shapely when available, falling back to
    pygeoif otherwise, then writes the resulting geo interface onto
    the context. A falsy ``value`` is ignored.
    """
    if value:
        # Keep only the import inside the try block: previously
        # `wkt.loads(value)` was inside it too, so an ImportError raised
        # while *parsing* would have been mistaken for a missing shapely.
        try:
            from shapely import wkt
        except ImportError:
            from pygeoif.geometry import from_wkt
            geom = from_wkt(value)
        else:
            geom = wkt.loads(value)
        coords = geom.__geo_interface__
        geo = IWriteGeoreferenced(self.context)
        geo.setGeoInterface(coords['type'], coords['coordinates'])
def create_base_content(portal):
    """Populate the portal with the shared test fixture: a folder with a
    KML layout, a Topic, a Collection, and one georeferenced document."""
    setRoles(portal, TEST_USER_ID, ['Manager'])

    folder_id = portal.invokeFactory('Folder', 'folder', title='Folder', description="Collective geo test container")
    folder = portal.get(folder_id)
    # set default view for folder
    folder.setLayout('kml-openlayers')

    # create topic matching the document by id
    topic_id = folder.invokeFactory('Topic', 'topic', title="Test Topic")
    topic = folder[topic_id]
    criterion = topic.addCriterion('getId', 'ATSimpleStringCriterion')
    criterion.setValue('doc')

    # create collection with the equivalent query
    collection_id = folder.invokeFactory('Collection', 'collection', title="Test Collection")
    collection = folder[collection_id]
    collection.setQuery([{
        'i': 'getId',
        'o': 'plone.app.querystring.operation.string.is',
        'v': 'doc'
    }])

    # create document and georeference it
    doc_id = folder.invokeFactory('Document', 'doc', title='Test document', description='A test document')
    doc = folder[doc_id]
    geo = IWriteGeoreferenced(doc)
    geo.setGeoInterface('Point', (-105, 40))
    doc.reindexObject(idxs=['zgeo_geometry'])

    setRoles(portal, TEST_USER_ID, ['Member'])
def removeCoordinates(self):
    """Remove the geo interface from the context when it is georeferenceable."""
    if not self.isGeoreferenceable():
        return
    IWriteGeoreferenced(self.context).removeGeoInterface()
def setCoordinates(self, type, coords):
    """Write a geometry of the given type/coordinates onto the context,
    provided it is georeferenceable. (`type` shadows the builtin but is
    part of the published signature.)"""
    if not self.isGeoreferenceable():
        return
    IWriteGeoreferenced(self.context).setGeoInterface(type, coords)
def _fetch_one(self, source, function, limit=None, reimport=False,
               source_ids=None, autoremove=False):
    """Import the events of a single external source.

    :param source: identifier of the import source.
    :param function: callable yielding the raw event dicts; may raise
        NoImportDataException when nothing was received.
    :param limit: optional cap on the number of events imported/removed.
    :param reimport: when True, ignore timestamps and import everything.
    :param source_ids: optional whitelist of source ids to import.
        Default changed from a shared mutable ``[]`` to ``None``; the
        value is only truth-tested below, so behavior is unchanged.
    :param autoremove: when True, delete local events whose source_id no
        longer appears in the external feed.
    :returns: tuple ``(imported_objects, number_of_deleted_events)``.
    """
    imported = []
    len_deleted = 0

    try:
        # sort so events sharing a source_id are adjacent and processed
        # in last_update order (needed for the limit handling below)
        events = sorted(function(), key=lambda e: (e['last_update'], e['source_id']))
    except NoImportDataException:
        log.info('no data received for %s' % source)
        return imported, len_deleted

    existing = self.grouped_existing_events(source)

    # Autoremove externally deleted events
    if autoremove:
        new_ids = [event['source_id'] for event in events]
        old_ids = existing.keys()
        delta = list(set(old_ids) - set(new_ids))
        if limit is not None:
            delta = delta[:limit]
        for source_id in delta:
            # source id's are not necessarily unique
            for event_id in existing[source_id]:
                log.info('Deleting %s' % (event_id))
                self.context.manage_delObjects(event_id)
                len_deleted += 1

    if len(events) == 0:
        return imported, len_deleted

    fetch_ids = set(event['fetch_id'] for event in events)
    assert len(fetch_ids) == 1, """
        Each event needs a fetch_id which describes the id of the whole
        fetch process and is therefore the same for all events in a
        single fetch.
        See seantis.dir.events.source.guidle:events
    """

    # key the last-update annotation on the fetch id
    self.annotation_key = hashlib.sha1(list(fetch_ids)[0]).hexdigest()
    last_update = self.get_last_update_time()
    last_update_in_run = datetime.min.replace(tzinfo=pytz.timezone('utc'))

    if not last_update:
        log.info('initial import')
        changed_offers_only = False
    elif reimport:
        log.info('reimport everything')
        changed_offers_only = False
    else:
        changed_offers_only = True

    total = len(events) if not limit else limit
    workflowTool = getToolByName(self.context, 'portal_workflow')
    categories = dict(cat1=set(), cat2=set())
    limit_reached_id = None

    for ix, event in enumerate(events):

        if limit_reached_id and limit_reached_id != event['source_id']:
            break

        if source_ids and event['source_id'] not in source_ids:
            continue

        assert 'last_update' in event, """
            Each event needs a last_update datetime info which is used
            to determine if any changes were done. This is used for
            importing only changed events.
        """

        if last_update_in_run < event['last_update']:
            last_update_in_run = event['last_update']

        # skip unchanged, already-known events on incremental runs
        if changed_offers_only and event['source_id'] in existing:
            if event['last_update'] <= last_update:
                continue

        # keep a set of all categories for the suggestions
        for cat in categories:
            if cat not in event:
                event[cat] = set()
            categories[cat] |= event[cat]

        # stop at limit
        if limit and (len(imported) + 1) >= limit and not limit_reached_id:
            log.info('reached limit of %i events' % limit)
            # don't quit right away, all events of the same source_id
            # need to be imported first since they have the same
            # update_time
            limit_reached_id = event['source_id']

        # flush to disk every 500th event to keep memory usage low
        if len(imported) != 0 and len(imported) % 500 == 0:
            transaction.savepoint(True)

        log.info('importing %i/%i %s @ %s' % (
            (len(imported) + 1), total, event['title'],
            event['start'].strftime('%d.%m.%Y %H:%M')
        ))

        event['source'] = source

        # If the existing event has been hidden, we keep it hidden
        hide_event = False
        if event['source_id'] in existing:
            for event_id in existing[event['source_id']]:
                review_state = self.context.get(event_id).review_state
                hide_event |= review_state == 'hidden'

        # source id's are not necessarily unique as a single external
        # event might have to be represented as more than one event in
        # seantis.dir.events - therefore updating is done through
        # deleting first, adding second
        if event['source_id'] in existing:
            for event_id in existing[event['source_id']]:
                self.context.manage_delObjects(event_id)
            del existing[event['source_id']]

        # image and attachments are downloaded
        downloads = {
            'image': NamedImage,
            'attachment_1': NamedFile,
            'attachment_2': NamedFile
        }

        def allow_download(download, url):
            if download != 'image':
                return True
            # whitelist the images that are known to work
            # not working is *.bmp. We could convert but I'd rather
            # force people to use a sane format
            return url.lower().endswith(
                ('png', 'jpg', 'jpeg', '@@images/image'))

        for download, method in downloads.items():
            url = event.get(download)
            name = download + '_name'
            if not url or not allow_download(download, url):
                event[download] = None
            else:
                try:
                    event[download] = method(self.download(url))
                    if name in event:
                        event[download].filename = event[name]
                except HTTPError:
                    event[download] = None
            # the *_name helper key must never reach the content factory
            if name in event:
                del event[name]

        # latitude and longitude are set through the interface
        lat, lon = event.get('latitude'), event.get('longitude')
        if 'latitude' in event:
            del event['latitude']
        if 'longitude' in event:
            del event['longitude']

        # so are categories
        cats = map(event.get, ('cat1', 'cat2'))
        del event['cat1']
        del event['cat2']

        assert 'cat3' not in event and 'cat4' not in event, """
            unsupported categories
        """

        obj = createContentInContainer(
            self.context, 'seantis.dir.events.item',
            checkConstraints=False, **event)

        # set coordinates now
        if lat and lon:
            try:
                IWriteGeoreferenced(obj).setGeoInterface(
                    'Point', map(float, (lon, lat)))
            except ValueError:
                pass

        # followed by the categories
        IDirectoryCategorized(obj).cat1 = list(cats[0])
        IDirectoryCategorized(obj).cat2 = list(cats[1])

        workflowTool.doActionFor(obj, 'submit')
        workflowTool.doActionFor(obj, 'publish')

        # touch the downloads so lazy blobs are materialized
        for download in downloads:
            getattr(obj, download)

        alsoProvides(obj, IExternalEvent)

        if hide_event:
            workflowTool.doActionFor(obj, 'hide')

        imported.append(obj)

    self.set_last_update_time(last_update_in_run)

    # add categories to suggestions
    for category in categories:
        key = '%s_suggestions' % category
        existing = getattr(self.context, key)
        existing = set(existing) if existing is not None else set()

        new = categories[category] | existing
        setattr(self.context, key, sorted(new))

        diff = categories[category] - existing
        if len(diff):
            log.info('added to %s %s' % (category, diff))

    transaction.commit()

    return imported, len_deleted