def run(self, resource, *args, **kwds):
    """Change the rdf resource"""
    uids = self.context.list_tiles()
    value = ''
    for uid in uids:
        tile = self.context.get_tile(uid)
        text = tile.data.get('text', None)
        if text:
            # convert to unicode
            if not isinstance(text.output, six.text_type):
                value += six.text_type(text.output, 'utf-8')
            else:
                value += text.output

    if value:
        try:
            setattr(resource, '%s_%s' % ("eea", "cover_tiles"), [value])
        except Exception:
            log.log('RDF marshaller error for context[tiles]'
                    '"%s": \n%s: %s' %
                    (self.context.absolute_url(),
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)

    return resource
def run(self, resource, adapter, session, *args, **kwds):
    """ modifier run method """
    language = self.context.Language()

    for field in self.context.Schema().fields():
        fieldName = field.getName()
        if fieldName in adapter.blacklist_map:
            continue

        # first we try with a named adapter, then a generic one
        fieldAdapter = queryMultiAdapter((field, self.context, session),
                                         interface=IATField2Surf,
                                         name=fieldName)
        if not fieldAdapter:
            fieldAdapter = getMultiAdapter((field, self.context, session),
                                           interface=IATField2Surf)

        if not fieldAdapter.exportable:
            continue

        try:
            value = fieldAdapter.value()
        except Exception:
            log.log('RDF marshaller error for context[field]'
                    '"%s[%s]": \n%s: %s' %
                    (self.context.absolute_url(), fieldName,
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)
            # value would be unbound below; skip this field
            continue

        valueAdapter = queryAdapter(value, interface=IValue2Surf)
        if valueAdapter:
            value = valueAdapter(language=language)

        if not value or value == "None":
            continue

        prefix = fieldAdapter.prefix or adapter.prefix

        if fieldAdapter.name:
            fieldName = fieldAdapter.name
        elif fieldName in adapter.field_map:
            fieldName = adapter.field_map.get(fieldName)
        elif fieldName in adapter.dc_map:
            fieldName = adapter.dc_map.get(fieldName)
            prefix = 'dcterms'

        try:
            setattr(resource, '%s_%s' % (prefix, fieldName), value)
        except Exception:
            log.log('RDF marshaller error for context[field]'
                    '"%s[%s]": \n%s: %s' %
                    (self.context.absolute_url(), fieldName,
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)

    return resource
def delete_article(self):
    """Delete an Article from Apple News"""
    adapter = self.get_adapter()
    article_id = adapter.data['id']
    try:
        adapter.delete_article()
    except AppleNewsError as e:
        log('Handled Apple News Error {}: {}'.format(e, e.data))
        if e.code == 404:
            message = _(
                u'error_article_delete_cleared',
                default=u'Article was already deleted (${article_id}). '
                        u'Clearing Id.',
                mapping={u'article_id': article_id}
            )
            IStatusMessage(self.request).addStatusMessage(
                message, "warning"
            )
        else:
            message = _(
                u'error_deleting_article',
                default=u'Error ${error_code} deleting article '
                        u'(${article_id}). See logs for more details.',
                mapping={u'error_code': six.text_type(e.code or u''),
                         u'article_id': article_id}
            )
            IStatusMessage(self.request).addStatusMessage(message, "error")
    else:
        IStatusMessage(self.request).addStatusMessage(
            _(u'article_deleted',
              default=u"Deleted article with id: ${article_id}",
              mapping={u'article_id': article_id}),
            "info"
        )
def run(self, resource, *args, **kwds):
    """Change the rdf resource"""
    plone_portal_state = self.context.restrictedTraverse(
        '@@plone_portal_state')
    portal_url = plone_portal_state.portal_url()

    workflowTool = getToolByName(self.context, "portal_workflow")
    wfs = workflowTool.getWorkflowsFor(self.context)
    wf = None
    for wf in wfs:
        if wf.isInfoSupported(self.context, "portal_workflow"):
            break

    status = workflowTool.getInfoFor(self.context, "review_state", None)
    if status is not None:
        status = ''.join([
            portal_url, "/portal_workflow/",
            getattr(wf, 'getId', lambda: '')(),
            "/states/", status
        ])

    try:
        setattr(resource, '%s_%s' % ("eea", "hasWorkflowState"),
                rdflib.URIRef(status))
    except Exception:
        log.log('RDF marshaller error for context[workflow_state]'
                '"%s": \n%s: %s' %
                (self.context.absolute_url(),
                 sys.exc_info()[0], sys.exc_info()[1]),
                severity=log.logging.WARN)

    return resource
def transaction_note(note):
    """Write a human-legible note on the current transaction"""
    T = transaction.get()
    if (len(T.description) + len(note)) >= 65533:
        log('Transaction note too large; omitting %s' % str(note))
    else:
        T.note(safe_unicode(note))
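A minimal usage sketch for transaction_note, assuming a running Zope/ZODB environment where the transaction package and the module's log/safe_unicode helpers are importable; the note text is illustrative only.

import transaction

# annotate the current transaction so the note shows up in the undo log,
# then commit as usual
transaction_note('Rebuilt catalog for /plone/news')
transaction.commit()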
def monkeyPatch(originalClass, patchingClass):
    """Monkey patch original class with attributes from new class
    (Swiped from SpeedPack -- thanks, Christian Heimes!)

    * Takes all attributes and methods except __doc__ and
      __module__ from patching class
    * Saves original attributes as __monkey_<name>
    * Overwrites/adds these attributes in original class
    """
    log("Monkeypatching class %s with class %s" %
        (originalClass.__name__, patchingClass.__name__))
    for name, newAttr in patchingClass.__dict__.items():
        # don't overwrite doc or module information
        if name not in ('__doc__', '__module__'):
            # save the old attribute as __monkey_<name> if it exists
            # __dict__ doesn't show inherited attributes :/
            log("  - replacing %s" % name)
            orig = getattr(originalClass, name, None)
            if orig:
                stored_orig_name = "__monkey_" + name
                stored_orig = getattr(originalClass, stored_orig_name, None)
                # don't double-patch on refresh!
                if stored_orig is None:
                    setattr(originalClass, stored_orig_name, orig)
            # overwrite or add the new attribute
            setattr(originalClass, name, newAttr)
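A hedged sketch of applying monkeyPatch to a plain old-style (Python 2) class, matching how it is used with classic classes later in this file; Original and Patches are hypothetical names. The saved attribute is looked up with getattr because a literal __monkey_greet inside the class body would be name-mangled.

class Original:
    def greet(self):
        return 'hello'

class Patches:
    def greet(self):
        # the pre-patch method is stored under the literal name
        # '__monkey_greet'; getattr avoids Python's name mangling
        orig = getattr(self, '__monkey_greet')
        return orig() + ' (patched)'

monkeyPatch(Original, Patches)
assert Original().greet() == 'hello (patched)'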
def modify_resource(self, resource, *args, **kwds):
    """ Schema to Surf """
    plone_portal_state = self.context.restrictedTraverse(
        '@@plone_portal_state')
    portal_url = plone_portal_state.portal_url()

    workflowTool = getToolByName(self.context, "portal_workflow")
    wfs = workflowTool.getWorkflowsFor(self.context)
    wf = None
    for wf in wfs:
        if wf.isInfoSupported(self.context, "portal_workflow"):
            break

    status = workflowTool.getInfoFor(self.context, "review_state", None)
    if status is not None:
        status = ''.join([portal_url, "/portal_workflow/",
                          getattr(wf, 'getId', lambda: '')(),
                          "/states/", status])

    try:
        setattr(resource, '%s_%s' % ("eea", "hasWorkflowState"),
                rdflib.URIRef(status))
    except Exception:
        log.log('RDF marshaller error for context[workflow_state]'
                '"%s": \n%s: %s' %
                (self.context.absolute_url(),
                 sys.exc_info()[0], sys.exc_info()[1]),
                severity=log.logging.WARN)

    return resource
def run(self, resource, *args, **kwds):
    """Change the rdf resource"""
    uids = self.context.list_tiles()
    value = ''
    for uid in uids:
        tile = self.context.get_tile(uid)
        text = tile.data.get('text', None)
        if text:
            # convert to unicode (Python 2)
            if not isinstance(text.output, unicode):
                value += unicode(text.output, 'utf-8')
            else:
                value += text.output

    if value:
        try:
            setattr(resource, '%s_%s' % ("eea", "cover_tiles"), [value])
        except Exception:
            log.log('RDF marshaller error for context[tiles]'
                    '"%s": \n%s: %s' %
                    (self.context.absolute_url(),
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)

    return resource
def recreateScales(self):
    """ Recreate Scales """
    context = self.context
    cat = getToolByName(context, 'portal_catalog')
    for brain in cat(
            object_provides='eea.soer.content.interfaces.ISoerFigure'):
        obj = brain.getObject()
        if obj is None:
            continue
        if hasattr(obj, 'image_preview'):
            continue
        try:
            state = obj._p_changed
        except Exception:
            state = 0
        field = obj.getField('image')
        if field is not None:
            if field.getScale(obj, 'preview'):
                continue
            log.log('UPDATING scales for %s' % obj.absolute_url())
            field.removeScales(obj)
            field.createScales(obj)
        if state is None:
            obj._p_deactivate()
def modify_resource(self, resource, *args, **kwds):
    """ Schema to Surf """
    context = self.context
    session = self.session

    setattr(resource, 'rdfs_label', (context.Title(), u'en'))
    setattr(resource, 'rdfs_comment', (context.Description(), u'en'))
    setattr(resource, 'rdf_id', self.rdfId)
    resource.update()

    # the following hack creates a new instance of a content to
    # allow extracting the full schema, with extended fields
    # Is this the only way to do this?
    # Another way would be to do a catalog search for a portal_type,
    # grab the first object from there and use that as context

    portal_type = context.getId()
    tmpFolder = getToolByName(context, 'portal_url').getPortalObject().\
        portal_factory._getTempFolder(portal_type)
    instance = getattr(tmpFolder, 'rdfstype', None)
    if instance is None:
        try:
            instance = _createObjectByType(portal_type,
                                           tmpFolder, 'rdfstype')
        except Exception:
            # might be a tool class
            if DEBUG:
                raise
            log.log('RDF marshaller error for FTI "%s": \n%s: %s' %
                    (context.absolute_url(),
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)
            return resource
        finally:
            catalog = getToolByName(context, 'portal_catalog')
            tmpPath = '%s/rdfstype' % '/'.join(tmpFolder.getPhysicalPath())
            brains = catalog(path=tmpPath)
            for br in brains:
                catalog.uncatalog_object(br.getPath())

    if hasattr(instance, 'Schema'):
        schema = instance.Schema()
        for field in schema.fields():
            fieldName = field.getName()
            if fieldName in self.blacklist_map:
                continue
            field2surf = queryMultiAdapter(
                (field, context, session),
                interface=IFieldDefinition2Surf)
            field2surf.write()

    return resource
def update_rolemap(context):
    if getattr(context, 'getSite', None) is not None:
        site = context.getSite()
        if context.readDataFile("pleiades.policy_various.txt") is None:
            return
    else:
        site = context
    p_jar = site._p_jar

    log('Updating workflow mapping for %s' % site.absolute_url(1),
        severity=logging.WARNING)
    wft = getToolByName(site, 'portal_workflow')
    wfs = {}
    for id in wft.objectIds():
        wf = wft.getWorkflowById(id)
        if hasattr(aq_base(wf), 'updateRoleMappingsFor'):
            wfs[id] = wf

    def update_mappings(ob, count, last_count):
        wf_ids = wft.getChainFor(ob)
        if wf_ids:
            changed = 0
            for wf_id in wf_ids:
                wf = wfs.get(wf_id, None)
                if wf is not None:
                    did = wf.updateRoleMappingsFor(ob)
                    if did:
                        changed = 1
            if changed:
                count = count + 1
                if count % COMMIT_LIMIT == 0:
                    transaction.savepoint(optimistic=True)
                    log('Savepoint after updating %d items' % count,
                        severity=logging.WARNING)
                    p_jar.cacheMinimize()
        if hasattr(aq_base(ob), 'objectItems'):
            obs = ob.objectItems()
            if obs:
                for k, v in obs:
                    changed = getattr(v, '_p_changed', 0)
                    count, last_count = update_mappings(v, count, last_count)
                    if changed is None:
                        # Re-ghostify.
                        v._p_deactivate()
                    if count - last_count >= COMMIT_LIMIT:
                        transaction.commit()
                        last_count = count
                        log('Commit after updating %d items' % count,
                            severity=logging.WARNING)
                        p_jar.cacheMinimize()
        return count, last_count

    count, last_count = update_mappings(site, 0, 0)
    log('Updated %d items' % count, severity=logging.WARNING)
    return
def _isFeedChanged(self, feed):
    """ Check whether the feed content has changed """
    feedHash = md5(feed).hexdigest()
    log.log('%s, %s' % (feedHash, self.feedHash))
    if feedHash != self.feedHash:
        self.feedHash = feedHash
        return True
    return False
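A standalone sketch of the hash-based change detection used above, assuming a Python 2 byte-string feed body and md5 from hashlib; all names here are illustrative.

from hashlib import md5

old_hash = None                   # e.g. persisted from a previous run
feed = '<rss>...</rss>'           # illustrative feed body
new_hash = md5(feed).hexdigest()
changed = new_hash != old_hash    # True on first run or when the body differs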
def regenerateTitles(self):
    """ Regenerate Titles """
    context = self.context
    cat = getToolByName(context, 'portal_catalog')
    for b in cat(portal_type=['CommonalityReport', 'FlexibilityReport',
                              'DiversityReport']):
        obj = b.getObject()
        log.log("UPDATING title '%s'" % obj.Title())
        notify(ObjectModifiedEvent(obj))
def modify_resource(self, resource, *args, **kwds):
    language = self.context.Language()

    ptypes = getToolByName(self.context, 'portal_types')
    fti = ptypes[self.context.portal_type]
    for fieldName, field in get_ordered_fields(fti):
        if fieldName in self.blacklist_map:
            continue

        fieldAdapter = queryMultiAdapter(
            (field, self.context, self.session),
            interface=IDXField2Surf, name=fieldName)
        if not fieldAdapter:
            fieldAdapter = getMultiAdapter(
                (field, self.context, self.session),
                interface=IDXField2Surf)

        if not fieldAdapter.exportable:
            continue

        try:
            value = fieldAdapter.value()
        except Exception:
            log.log('RDF marshaller error for context[field]'
                    '"%s[%s]": \n%s: %s' %
                    (self.context.absolute_url(), fieldName,
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)
            continue

        valueAdapter = queryAdapter(value, interface=IValue2Surf)
        if valueAdapter:
            value = valueAdapter(language=language)

        if not value or value == "None":
            continue

        prefix = (fieldAdapter.prefix or self.prefix).replace('.', '')
        fieldName = fieldAdapter.name

        if fieldName in self.field_map:
            fieldName = self.field_map.get(fieldName)
        elif fieldName in self.dc_map:
            fieldName = self.dc_map.get(fieldName)
            prefix = 'dcterms'

        try:
            setattr(resource, '%s_%s' % (prefix, fieldName), value)
        except Exception:
            log.log('RDF marshaller error for context[field]'
                    '"%s[%s]": \n%s: %s' %
                    (self.context.absolute_url(), fieldName,
                     sys.exc_info()[0], sys.exc_info()[1]),
                    severity=log.logging.WARN)

    return resource
def clearAndRebuildCatalog(self, threshold=THRESHOLD):
    """Erases the catalog, then finds everything and indexes it.

    This is dangerous because an uncaught exception in the middle can
    result in an incomplete catalog.  Additionally, at any given time
    during the run of this script the catalogs will be in an
    incomplete state."""
    self._catalog.manage_catalogClear()
    self._uid_catalog.manage_catalogClear()
    transaction.commit()
    log('Full transaction commit after emptying catalogs. '
        'The site may be broken at this point')
    return self.findAndIndexAll(threshold)
def removeOldCountryMaps(self):
    """ Removes Old Country Maps """
    context = self.context
    cat = getToolByName(context, 'portal_catalog')
    for b in cat(portal_type='SOERCountry'):
        obj = b.getObject()
        mapid = '%s_map.png' % obj.getId()
        if hasattr(obj, mapid):
            log.log('Deleting old map %s for %s' % (
                mapid, obj.absolute_url()))
            obj.manage_delObjects(ids=[mapid])
def value(self):
    """ Value """
    try:
        return self.field.getAccessor(self.context)()
    except Exception:
        log.log('RDF marshaller error for context[field]'
                '"%s[%s]": \n%s: %s' %
                (self.context.absolute_url(), self.field.getName(),
                 sys.exc_info()[0], sys.exc_info()[1]),
                severity=log.logging.WARN)
        return None
def update(self):
    uids = []
    request = self.request
    b_start = request.form.get('b_start', 0)
    catalog = getToolByName(self.context, 'portal_catalog')
    self.batch = Batch(
        catalog(has_apple_news=True,
                sort_on='Date',
                sort_order='descending'),
        start=b_start,
        size=50
    )
    if request.method.lower() == 'post':
        messages = []
        authenticator = getMultiAdapter(
            (self.context, request), name=u"authenticator"
        )
        if not authenticator.verify():
            raise Unauthorized
        uids = request.get('uids', [])
        if not uids:
            return
        count = 0
        brains = catalog(has_apple_news=True, UID=uids)
        for b in brains:
            obj = b.getObject()
            adapter = IAppleNewsActions(obj, alternate=None)
            if adapter is not None:
                try:
                    adapter.update_article()
                    count += 1
                except AppleNewsError as e:
                    log(u'Handled Apple News Error in bulk update '
                        u'{}: {}'.format(e, e.data))
                    if e.code == 409:
                        messages.append(
                            u'Unable to update article "{}" '.format(
                                safe_unicode(b.Title)
                            ) + u'because there are conflicting changes '
                                u'in Apple News Publisher'
                        )
                    else:
                        messages.append(
                            u'Unable to update article "{}"; '.format(
                                safe_unicode(b.Title)
                            ) + u'check logs for details.'
                        )
        msg_adapter = IStatusMessage(self.request)
        msg_adapter.add(
            u'Updated {} Apple News articles with {} errors'.format(
                count, len(brains) - count
            ),
            type=u"info"
        )
        for msg in messages:
            msg_adapter.add(msg, type=u'error')
def value(self):
    """ Value """
    value = getattr(aq_base(self.context), self.name, None)
    try:
        if callable(value):
            value = value()
        return value
    except Exception:
        log.log('RDF marshaller error for context[field]'
                '"%s[%s]": \n%s: %s' %
                (self.context.absolute_url(), self.name,
                 sys.exc_info()[0], sys.exc_info()[1]),
                severity=log.logging.WARN)
        return None
def hasFigure(self):
    """ Has figure """
    context = self.context
    if context.soer_hasFigure:
        for fig in context.soer_hasFigure:
            try:
                fileName = str(fig.soer_fileName.first) if \
                    fig.soer_fileName.first else 'tempfile'
            except Exception:
                log.log('Figure resource without information %s' % fig,
                        severity=log.logging.WARN)
                continue

            sortOrder = 0
            if hasattr(fig, 'soer_sortOrder'):
                if fig.soer_sortOrder.first is not None:
                    sortOrder = int(str(fig.soer_sortOrder.first))

            result = {
                'url': fig.subject.strip(),
                'fileName': fileName,
                'caption': str(fig.soer_caption.first),
                'description': str(fig.soer_description.first),
                'sortOrder': sortOrder
            }
            if fig.soer_mediaType.first is not None:
                result['mediaType'] = fig.soer_mediaType.first.strip()

            if fig.soer_dataSource.first is not None:
                dataSrc = fig.soer_dataSource.first
                if not isinstance(dataSrc, rdflib.URIRef):
                    fileName = str(dataSrc.soer_fileName.first) if \
                        dataSrc.soer_fileName.first else 'tempfile'
                    result['dataSource'] = {
                        'url': dataSrc.subject.strip(),
                        'fileName': fileName,
                        'dataURL': dataSrc.soer_dataURL.first.strip()
                    }
                else:
                    result['dataSource'] = {
                        'url': str(dataSrc),
                        'fileName': fileName,
                        'dataURL': str(dataSrc)
                    }
                    log.log('Data source without information %s' % dataSrc,
                            severity=log.logging.WARN)
            yield result
def reloadFacetedNavigation(self):
    """ Reload faceted configuration for all countries """
    for folder in self.context.getFolderContents(
            contentFilter={'portal_type': 'SOERCountry'},
            full_objects=True):
        subtyper = getMultiAdapter((folder, self.request),
                                   name=u'faceted_subtyper')
        country_code = folder.getId()
        subtyper.enable()
        log.log('UPDATING faceted configuration for %s' %
                folder.absolute_url())
        faceted = FacetedExportImport(folder, folder.REQUEST)
        faceted.import_xml(import_file=facetedCountry.replace(
            '<element value="se"/>',
            '<element value="%s"/>' % country_code).replace(
            ' name="se" ', ' name="%s" ' % country_code))
def conflict_safe_find_and_update(self, action, threshold,
                                  res_filter=None, **kw):
    """Find objects with ZopeFind and perform actions on them in
    batches.  Takes the same keyword args as ZopeFind"""
    results = self.context.ZopeFind(self.context, search_sub=True, **kw)
    total = len(results)
    if res_filter is None:
        res_filter = lambda x: True
    # Split the results into subsets lazily
    slices = ((o for (p, o) in results[start:end] if res_filter(o))
              for start, end in izip(xrange(0, total, threshold),
                                     xrange(threshold, total + threshold,
                                            threshold)))
    # Run an abort here to help avoid conflicts on the first batch
    # resulting from the extended run of ZopeFind
    transaction.abort()
    log('Transaction abort after finding all content (%s objects).' % total)
    self.context._p_jar.sync()
    return self._conflict_safe_update(slices, action)
def conflict_safe_catalog_update(self, query, action, threshold,
                                 res_filter=None):
    """Performs an action on objects found with a catalog query in a
    way that tries to minimize conflicts"""
    cat = self._catalog
    brains = cat(query)
    total = len(brains)
    if res_filter is None:
        res_filter = lambda x: True
    # split into filtered sub-lists of size threshold
    slices = ((o for o in brains[start:end] if res_filter(o))
              for start, end in izip(xrange(0, total, threshold),
                                     xrange(threshold, total + threshold,
                                            threshold)))
    # Run an abort here to help avoid conflicts on the first batch
    # resulting from the extended catalog search
    transaction.abort()
    log('Transaction abort after finding all content (%s objects)' % total)
    self.context._p_jar.sync()
    return self._conflict_safe_update(slices, action)
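A standalone sketch of the batching idiom shared by the two methods above: the result list is split lazily into generator slices of threshold items each (Python 2 izip/xrange, as in the originals); make_slices is a hypothetical helper name.

from itertools import izip

def make_slices(results, threshold):
    # pair up (0, t), (t, 2t), ... and yield one generator per window
    total = len(results)
    return ((item for item in results[start:end])
            for start, end in izip(xrange(0, total, threshold),
                                   xrange(threshold, total + threshold,
                                          threshold)))

batches = make_slices(range(10), 4)   # windows: 0-3, 4-7, 8-9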
def soerImagesAndLinks(self):
    """ Soer Images and Links method """
    context = self.context
    cat = getToolByName(context, 'portal_catalog')
    for b in cat(
            object_provides='eea.soer.content.interfaces.ISOERReport'):
        obj = b.getObject()
        for fig in obj.getFolderContents(
                contentFilter={'portal_type': 'Image'},
                full_objects=True):
            if not ISoerFigure.providedBy(fig):
                directlyProvides(fig, ISoerFigure)
                log.log('MIGRATED figure %s' % fig.absolute_url())
        for link in obj.getFolderContents(
                contentFilter={'portal_type': ['Link', 'DataSourceLink']},
                full_objects=True):
            if not ISoerDataFile.providedBy(link):
                directlyProvides(link, ISoerDataFile)
                log.log('MIGRATED data source %s' % link.absolute_url())
def getIcon(self, context, path):
    pack = self.getPack(context)
    icon = None
    # This variable is just used for the log message
    icon_path = path
    try:
        icon = ImageFile(path, pack.__dict__)
    except (IOError, OSError):
        # Fallback: assume path is relative to CMFPlone directory
        path = abspath(join(PACKAGE_HOME, path))
        try:
            icon = ImageFile(path, pack.__dict__)
        except (IOError, OSError):
            # if there is some problem loading the fancy image
            # from the tool then tell someone about it
            log(('The icon for the product: %s which was set to: %s, '
                 'was not found. Using the default.' %
                 (self.product_name, icon_path)))
    return icon
def resource(self, **kwds):
    """ Factory for a new Surf resource """
    if self._resource is not None:
        return self._resource

    try:
        # pull a new resource from the surf session
        resource = self.session.get_class(
            self.namespace[self.portalType])(self.subject)
    except Exception:
        if DEBUG:
            raise
        log.log('RDF marshaller error \n%s: %s' %
                (sys.exc_info()[0], sys.exc_info()[1]),
                severity=log.logging.WARN)
        return None

    resource.bind_namespaces([self.prefix])
    resource.session = self.session
    self._resource = resource
    return resource
def render_locales(self):
    """Returns JS rendering of Locale file(s).

    Currently we only load the site default locale, and fall back
    to en-US.
    """
    self._setHeader()
    portal_state = queryMultiAdapter((self.context, self.request),
                                     name=u'plone_portal_state')
    self.portal = portal_state.portal()
    locale = portal_state.default_language()
    # Capitalization format: en-US
    locale = locale[:2] + locale[2:].upper()
    if len(locale) == 2:
        locale = self._find_locale_for_lang(locale)
    if locale not in LOCALES_AVAILABLE:
        log('citationstyles locale not found for {0}. '
            'Falling back to default.'.format(locale))
        # fall back to global default
        locale = DEFAULT_LOCALE
    locale_string = self._get_locale_string(locale)
    return ('collective_csl_info.add_locale({name}, {locale_string});\n'
            'collective_csl_info.default_locale = {name};'.format(
                name=dumps(locale),
                locale_string=dumps(locale_string)))
def update_article(self):
    """Update an Article in Apple News"""
    CheckAuthenticator(self.request)
    if not _checkPermission('Apple News: Manage News Content',
                            self.context):
        raise Unauthorized
    adapter = self.get_adapter()
    try:
        adapter.update_article()
    except AppleNewsError as e:
        log('Handled Apple News Error {}: {}'.format(e, e.data))
        article_id = adapter.data.get('id', u'')
        if e.code == 409:
            message = _(
                u'unable_to_update_article_conflicts',
                default=u'Unable to update article (${article_id}) '
                        u'because it has conflicting changes. '
                        u'Retry again to refresh.',
                mapping={u'article_id': article_id}
            )
        elif e.code == 418:
            message = _(u'Error article not published')
        else:
            message = _(
                u'error_updating_article',
                default=u'Error ${error_code} updating article '
                        u'(${article_id}). See logs for more details.',
                mapping={u'error_code': six.text_type(e.code or u''),
                         u'article_id': article_id}
            )
        IStatusMessage(self.request).addStatusMessage(message, "error")
    else:
        article_id = adapter.data.get('id', u'')
        IStatusMessage(self.request).addStatusMessage(
            _(u'updated_article_success',
              default=u"Updated article with id: ${article_id}",
              mapping={u'article_id': article_id}),
            "info"
        )
def __call__(self, *args, **kwds):
    # Strip illegal XML characters from the string
    self.value = self.escapeXMLIllegalCharacters()
    if not self.value.strip():
        return None

    nonEUencodings = ['Big5', 'GB2312', 'EUC-TW', 'HZ-GB-2312',
                      'ISO-2022-CN', 'EUC-JP', 'SHIFT_JIS', 'ISO-2022-JP',
                      'EUC-KR', 'ISO-2022-KR', 'TIS-620', 'ISO-8859-2',
                      'Windows-1252']

    language = kwds['language']
    encoding = detect(self.value)['encoding']
    if encoding in nonEUencodings:
        value = self.value.decode('utf-8', 'replace')
    else:
        try:
            value = self.value.decode(encoding)
        except (LookupError, UnicodeDecodeError):
            log.log("Could not decode from %s in rdfmarshaller" % encoding)
            value = self.value.decode('utf-8', 'replace')

    return (value.encode('utf-8').strip(), language)
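A standalone sketch of the decode-with-fallback idiom used above, assuming the chardet package provides detect() and the input is a Python 2 byte string; safe_decode is a hypothetical helper name.

from chardet import detect

def safe_decode(value):
    # guess the encoding, fall back to lossy UTF-8 on any decode failure
    encoding = detect(value)['encoding']
    try:
        return value.decode(encoding)
    except (LookupError, UnicodeDecodeError, TypeError):
        # TypeError covers encoding=None when detection fails entirely
        return value.decode('utf-8', 'replace')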
def _conflict_safe_update(self, slices, action):
    """Perform an action on groups of objects, with transaction
    commits along the way"""
    updated = []
    trans_failed = []
    commit = transaction.commit
    sync = self.context._p_jar.sync
    obj_path = self._obj_path
    for n, s in enumerate(slices):
        # skip cache purging
        resolved = list(s)
        if not resolved:
            continue
        [action(r) for r in resolved]
        try:
            commit()
            log('Full transaction committed on batch %s (%s)' %
                (n, len(resolved)))
            sync()
        except ConflictError:
            transaction.abort()
            log('Conflict on batch %s, retrying ...' % n,
                severity=logging.WARN)
            # try once more
            [action(r) for r in resolved]
            try:
                commit()
                log('Full transaction committed after retry on '
                    'batch %s' % n, severity=logging.WARN)
                sync()
            except ConflictError:
                transaction.abort()
                log_exc('Conflict after two tries on batch %s, '
                        'continuing' % n)
                trans_failed.extend(obj_path(o) for o in resolved)
                continue
        updated.extend(obj_path(o) for o in resolved)
    return updated, trans_failed
def __call__(self):
    context = self.context
    deleteOld = bool(self.request.get('deleteOld', False))
    updateFeed = bool(self.request.get('updateFeed', False))
    feeds = {
        'bg': ['http://nfp-bg.eionet.eu.int/soer-2010/part-c/rdf'],
        'be': ['http://nfp.irceline.be/soer-2010/@@rdf'],
        'sk': ['http://tsense.sazp.sk/Plone/soer-2010-part-c/slovakia/@@rdf'],
        'ie': ['http://www.epa.ie/environmentinfocus/socio-economic/index.rdf',
               'http://www.epa.ie/environmentinfocus/climatechange/index.rdf',
               'http://www.epa.ie/environmentinfocus/air/index.rdf',
               'http://www.epa.ie/environmentinfocus/water/index.rdf',
               'http://www.epa.ie/environmentinfocus/waste/index.rdf',
               'http://www.epa.ie/environmentinfocus/land/index.rdf',
               'http://www.epa.ie/environmentinfocus/nature/index.rdf',
               'http://www.epa.ie/environmentinfocus/socio-economic/greeneconomy/index.rdf',
               'http://www.epa.ie/environmentinfocus/socio-economic/'
               'irishsustainabledevelopmentmodel/index.rdf'],
        'no': ['http://www.miljostatus.no/rdf'],
        'ro': ['http://www.anpm.ro/soerstories/rdf'],
        'si': ['http://www.arso.gov.si/en/soer/air_pollution.rdf',
               'http://www.arso.gov.si/en/soer/alps.rdf',
               'http://www.arso.gov.si/en/soer/bear_story.rdf',
               'http://www.arso.gov.si/en/soer/biodiversity.rdf',
               'http://www.arso.gov.si/en/soer/climate_change.rdf',
               'http://www.arso.gov.si/en/soer/country_introduction.rdf',
               'http://www.arso.gov.si/en/soer/freshwater.rdf',
               'http://www.arso.gov.si/en/soer/land.rdf',
               'http://www.arso.gov.si/en/soer/waste.rdf'],
        # Alps works, rest are broken
        'it': ['http://www.sense.sinanet.isprambiente.it/Plone/air-pollution/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/climate-change/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/waste/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/'
               'nature-protection-and-biodiversity/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/land/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/diversity/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/freshwater/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/flexibility-alps/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/'
               'flexibility-local-authorities/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/'
               'flexibility-organic-farming/@@rdf',
               'http://www.sense.sinanet.isprambiente.it/Plone/'
               'flexibility-white-certificates/@@rdf'],
        # Questions don't follow specification
        'cz': ['http://issar.cenia.cz/issar/add/CZ_SOER.rdf'],
        'de': ['http://sites.uba.de/SOER/frm/Diversity.xml',
               'http://sites.uba.de/SOER/frm/Air-pollution.xml',
               'http://sites.uba.de/SOER/frm/Freshwater.xml',
               'http://sites.uba.de/SOER/frm/Climate-change.xml',
               'http://sites.uba.de/SOER/frm/Land.xml',
               'http://sites.uba.de/SOER/frm/Waste.xml',
               'http://sites.uba.de/SOER/frm/Biodiversity.xml',
               'http://sites.uba.de/SOER/frm/Flexibility.xml'],
        # Waiting for feeds
        'at': ['http://www.umweltbundesamt.at/rdf_eea'],
        'se': ['http://www.naturvardsverket.se/en/In-English/Menu/GlobalMenu/Sense---RDF'],
        # Inaccessible, password protected, they are working on it
        'rs': ['http://www.report.sepa.gov.rs/soer-2010-serbia/@@rdf'],
    }

    for country_code, urls in feeds.items():
        if hasattr(aq_base(context), country_code):
            log.log("SENSE setup of '%s'" % country_code)
            country = context[country_code]
            if country.getRdfFeed() and deleteOld:
                log.log("SENSE setup of '%s' found old feeds, deleting "
                        "them" % country_code)
                oldFeedIds = [b.getId for b in country.getFolderContents(
                    contentFilter={'portal_type': 'Link'})]
                country.manage_delObjects(ids=oldFeedIds)
                country.setRdfFeed('')
            if not country.getRdfFeed():
                country.setRdfFeed(urls[0])
                log.log("SENSE setup adding feed %s to '%s'" %
                        (urls[0], country_code))
                for url in urls[1:]:
                    feed = country[country.invokeFactory(
                        'Link', id='tmplink', title=url, remoteUrl=url)]
                    feed._renameAfterCreation(check_auto_id=False)
                    log.log("SENSE setup adding feed %s to '%s'" %
                            (url, country_code))
            else:
                log.log("SENSE setup found old feeds in '%s', skipping "
                        "setup" % country_code)
            if updateFeed:
                country.updateFromFeed()
        else:
            log.log("SENSE setup did NOT find '%s'; no feeds "
                    "were set up." % country_code)
def updateReport(nstory, report=None):
    """ Update report """
    parentReport = None
    if nstory.portal_type in ['DiversityReport', 'CommonalityReport']:
        questions = dict(
            [[v, k] for k, v in
             vocab.old_long_diversity_questions.items()])
        questions.update(
            dict([[v, k] for k, v in vocab.long_questions.items()]))
        # Old labels before #3685
        questions.update(
            dict([[v, k] for k, v in vocab.old_long_questions.items()]))
        question = questions.get(nstory.question, nstory.question)
    else:
        question = nstory.question

    if report is None:
        parentReport = reports.get((nstory.topic, nstory.question), None)
        if parentReport:
            report = parentReport[parentReport.invokeFactory(
                nstory.portal_type, id='temp_report',
                topic=nstory.topic, question=question)]
        else:
            report = self[self.invokeFactory(
                nstory.portal_type, id='temp_report',
                topic=nstory.topic, question=question)]

    subject = nstory.keyword
    if isinstance(subject, (list, tuple)):
        subject = [k.decode('utf-8') for k in nstory.keyword]
    if isinstance(subject, basestring):
        subject = subject.decode('utf-8')

    report.setLanguage(language)
    report.setDescription(nstory.description)
    report.setKeyMessage(tidyUp(nstory.keyMessage))
    report.setGeoCoverage(nstory.geoCoverage)
    report.setSubject(subject)
    report.setEvaluation(nstory.evaluation)
    newId = report._renameAfterCreation(check_auto_id=False)

    if parentReport is None:
        parentReport = report = self[newId]
        reports[(nstory.topic, nstory.question)] = report
    else:
        report = parentReport[newId]
        if hasattr(nstory, 'sortOrder'):
            parentReport.moveObjectToPosition(
                newId, int(nstory.sortOrder))
        else:
            parentReport.moveObjectsToTop(ids=[newId])

    assessment = tidyUp(nstory.assessment)
    for fig in nstory.hasFigure():
        log.log('Fetching Figure: %s' % fig['url'])
        # read figure
        try:
            image = urllib2.urlopen(fig['url'], timeout=10.0)
        except Exception:
            log.log('FAILED: Fetching Figure: %s' % fig['url'])
            continue
        image_data = image.read()
        if image_data:
            figure = getattr(report, fig['fileName'], None)
            if figure is not None:
                continue
            figure = report[report.invokeFactory(
                'Image', id='tempfile', image=image_data)]
            figure.setTitle(fig['caption'] or fig['fileName'])
            figure.setDescription(fig['description'])
            newId = figure._renameAfterCreation(check_auto_id=False)
            figure = report[newId]
            publishIfPossible(figure)
            if fig['url'] in assessment.decode('utf8'):
                assessment = assessment.replace(
                    fig['url'].encode('utf8'),
                    'resolveuid/%s' % figure.UID())
            if fig.get('dataSource', None) is not None:
                dataSrc = fig['dataSource']
                dataLink = report[report.invokeFactory(
                    'DataSourceLink', id='tmpdatalink',
                    title=dataSrc['dataURL'],
                    remoteUrl=dataSrc['dataURL'])]
                dataLink.setLanguage(language)
                newId = dataLink._renameAfterCreation(
                    check_auto_id=False)
                dataLink = report[newId]
                figure.setRelatedItems([dataLink])
                publishIfPossible(dataLink)
            figure.setLanguage(language)
            report.moveObjectToPosition(
                figure.getId(), fig['sortOrder'])
            figure.reindexObject()
        else:
            log.log('FAILED: Figure is empty: %s' % fig['url'])

    i = 0
    for indicatorUrl in nstory.relatedIndicator():
        i += 1
        if not indicatorUrl.startswith('http'):
            # BBB: need to find out which indicator url
            # it is for i.e CSI 018
            continue
        title = u'Related indicator'
        try:
            url = urllib2.urlopen(indicatorUrl, timeout=2.0)
            soup = BeautifulSoup(url, 'lxml')
            title = soup.title.string.encode('utf8').strip()
        except Exception:
            # We failed to get the title of the indicator;
            # use 'Related indicator'
            logger.info('Failed to get the title of the indicator')
        indicator = report[report.invokeFactory(
            'RelatedIndicatorLink', id='indicator%s' % i,
            remoteUrl=indicatorUrl, title=title)]
        publishIfPossible(indicator)

    report.setText(assessment, format='text/html')
    report.setEffectiveDate(nstory.pubDate)
    publishIfPossible(report)
    report.original_url = nstory.subject.strip()
    report.setModificationDate(nstory.modified)
    report.reindexObject()
    report.setModificationDate(nstory.modified)
class TopicPatches:

    security = ClassSecurityInfo()

    security.declarePublic('getObjects')
    def getObjects(self):
        """ Return the topic's query results as full objects """
        return self.queryCatalog(b_size=50, full_objects=True)


from Products.ATContentTypes.content.topic import ATTopic
log("Applying PloneGazette patches")
monkeyPatch(ATTopic, TopicPatches)
def ulocalized_time(time, long_format=None, time_only=False, context=None,
                    domain='plonelocales', request=None):
    # get msgid
    msgid = long_format and 'date_format_long' or 'date_format_short'
    if time_only:
        msgid = 'time_format'

    # NOTE: this requires the presence of three msgids inside the
    # translation catalog: date_format_long, date_format_short, and
    # time_format.  These msgids are translated using interpolation.
    # The variables used here are the same as used in strftime formatting.
    # Supported are %A, %a, %B, %b, %H, %I, %m, %d, %M, %p, %S, %Y, %y,
    # %Z, each used as a variable in the msgstr without the %.
    # For example: "${A} ${d}. ${B} ${Y}, ${H}:${M} ${Z}"
    # Each language-dependent part is translated itself as well.

    # From http://docs.python.org/lib/module-time.html
    #
    # %a  Locale's abbreviated weekday name.
    # %A  Locale's full weekday name.
    # %b  Locale's abbreviated month name.
    # %B  Locale's full month name.
    # %d  Day of the month as a decimal number [01,31].
    # %H  Hour (24-hour clock) as a decimal number [00,23].
    # %I  Hour (12-hour clock) as a decimal number [01,12].
    # %m  Month as a decimal number [01,12].
    # %M  Minute as a decimal number [00,59].
    # %p  Locale's equivalent of either AM or PM.
    # %S  Second as a decimal number [00,61].
    # %y  Year without century as a decimal number [00,99].
    # %Y  Year with century as a decimal number.
    # %Z  Time zone name (no characters if no time zone exists).

    mapping = {}
    # convert to DateTime instances. Either a date string or
    # a DateTime instance needs to be passed.
    if not IDateTime.providedBy(time):
        try:
            time = DateTime(time)
        except Exception:
            log('Failed to convert %s to a DateTime object' % time,
                severity=logging.DEBUG)
            return None

    if context is None:
        # without a context we cannot do very much
        return time.ISO8601()

    if request is None:
        request = aq_acquire(context, 'REQUEST')

    # get the format string
    formatstring = translate(msgid, domain, mapping, request)
    if formatstring is None or formatstring.startswith('date_') or \
            formatstring.startswith('time_'):
        # msg catalog was not able to translate these msgids;
        # use the default settings
        properties = getToolByName(context,
                                   'portal_properties').site_properties
        if long_format:
            format = properties.localLongTimeFormat
        else:
            if time_only:
                format = properties.localTimeOnlyFormat
            else:
                format = properties.localTimeFormat
        return time.strftime(format)

    # get the format elements used in the formatstring
    formatelements = _interp_regex.findall(formatstring)
    # reformat the ${foo} to foo
    formatelements = [el[2:-1] for el in formatelements]

    # add used elements to mapping
    elements = [e for e in formatelements if e in datetime_formatvariables]

    # add weekday name, abbreviated weekday name, month name,
    # abbreviated month name
    week_included = True
    month_included = True

    name_elements = [e for e in formatelements if e in name_formatvariables]
    if not ('a' in name_elements or 'A' in name_elements):
        week_included = False
    if not ('b' in name_elements or 'B' in name_elements):
        month_included = False

    for key in elements:
        mapping[key] = time.strftime('%' + key)

    if week_included:
        weekday = int(time.strftime('%w'))  # weekday, sunday = 0
        if 'a' in name_elements:
            mapping['a'] = weekdayname_msgid_abbr(weekday)
        if 'A' in name_elements:
            mapping['A'] = weekdayname_msgid(weekday)
    if month_included:
        monthday = int(time.strftime('%m'))  # month, january = 1
        if 'b' in name_elements:
            mapping['b'] = monthname_msgid_abbr(monthday)
        if 'B' in name_elements:
            mapping['B'] = monthname_msgid(monthday)

    # translate translatable elements
    for key in name_elements:
        mapping[key] = translate(mapping[key], domain, context=request,
                                 default=mapping[key])

    # translate the time string
    return translate(msgid, domain, mapping, request)
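A minimal sketch of the ${...} placeholder extraction step above, using a simplified stand-in regex since the module's actual _interp_regex pattern is not shown here.

import re

_simple_interp = re.compile(r'\$\{[A-Za-z]\}')   # simplified stand-in
formatstring = u'${A} ${d}. ${B} ${Y}, ${H}:${M}'
elements = [el[2:-1] for el in _simple_interp.findall(formatstring)]
# elements == [u'A', u'd', u'B', u'Y', u'H', u'M']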
    except urllib2.HTTPError, response:
        pass
    code = response.getcode()
    if code in (200, 301, 302):
        self._set_content(response)
    else:
        data = response.read()
        headers = response.info()
        retry_after = headers.get('Retry-After')
        if retry_after:
            try:
                retry_after = int(retry_after)
            except (ValueError, TypeError):
                retry_after = None
        log('Zoomit update call failed %s: %s, %s' % (
            self.context.absolute_url(), code, data))
        self.failed = True
        self.status = code
        if retry_after:
            log('Retry after %s seconds' % retry_after)
            self.last_response = (data +
                                  ('\n Retry after %s seconds' %
                                   retry_after))
        else:
            self.last_response = data
        self.retry_after = retry_after
    self.update_timestamp = datetime.now()


class ZoomItUpdater(form.Form):