def __call__(self):
    """Create a new version of the context Assessment.

    If the parent Specification is itself the latest version, a new
    version is created immediately and the browser is redirected to it.
    Otherwise the user is shown a template asking where to create the
    version ("here" or in the newest Specification); the form answer is
    processed on submit.
    """
    spec = aq_parent(aq_inner(self.context))
    latest = IGetVersions(spec).latest_version()
    if spec.UID() == latest.UID():
        # parent is already the newest spec: no question to ask
        version = create_version(self.context)
        return self.request.RESPONSE.redirect(version.absolute_url())
    # expose info about the newest spec for the confirmation template
    self.spec_title = latest.Title()
    self.spec_url = latest.absolute_url()
    self.date = latest.effective_date or latest.creation_date
    if "submit" not in self.request.form:
        # first request: render the choice form
        return self.template()
    choice = self.request.form.get("choice")
    if choice == "here":
        # version created next to the context, in the older spec
        version = create_version(self.context)
        #return self.request.RESPONSE.redirect(version.absolute_url())
        return "OK"
    if choice == "newest":
        # delegate creation to the newest spec's Assessment factory
        version = latest.factory_Assessment()['obj']
        #return self.request.RESPONSE.redirect(version.absolute_url())
        return "OK"
    raise ValueError("Unknown option for field choice")
def evolve(context):
    """ Migrate versionIds for objects that don't have them set

    Walks catalog brains (``missing=True`` — presumably an index of
    objects lacking a versionId; TODO confirm index semantics) and
    assigns a fresh random versionId to version-enhanced objects whose
    stored/indexed versionId is empty. Commits every 500 migrations.
    NOTE: uses ``basestring`` — Python 2 only.
    """
    cat = context.portal_catalog
    brains = cat.searchResults(missing=True, Language="all")
    i = 0  # counts actual migrations, drives periodic commits
    for brain in brains:
        obj = brain.getObject()
        if not IVersionEnhanced.providedBy(obj):
            continue
        # first, check the brain's versionId
        brain_version = brain.getVersionId
        if isinstance(brain_version, basestring) and brain_version:
            # everything fine
            continue
        if brain.portal_type == "Discussion Item":
            continue  # skipping Discussion Items, they can't be reindexed
        versionId = IGetVersions(obj).versionId
        if isinstance(brain_version, basestring) and not brain_version.strip():
            # indexed value is an empty string, assigning new versionId
            IAnnotations(obj)[VERSION_ID] = _random_id(obj)
            #obj.reindexObject()
            msg = "Migrated versionId storage (empty string) for %s (%s)" % \
                (obj.absolute_url(), versionId)
            logger.info(msg)
            if (i % 500) == 0:
                transaction.commit()
            i += 1
            continue
        if isinstance(versionId, basestring) and not versionId.strip():
            # stored value is an empty string, assigning new versionId
            IAnnotations(obj)[VERSION_ID] = _random_id(obj)
            #obj.reindexObject()
            msg = "Migrated versionId storage (empty string) for %s (%s)" % \
                (obj.absolute_url(), versionId)
            logger.info(msg)
            if (i % 500) == 0:
                transaction.commit()
            i += 1
            continue
        if not brain.getVersionId:
            # no indexed value at all, assigning new versionId
            IAnnotations(obj)[VERSION_ID] = _random_id(obj)
            #obj.reindexObject()
            msg = "Migrated versionId storage (empty storage) for %s (%s)" % \
                (obj.absolute_url(), versionId)
            logger.info(msg)
            if (i % 500) == 0:
                transaction.commit()
            i += 1
            continue
        migrate_versionId_storage(obj)  #this is an old storage:
def filterLatestVersion(self, brains, noOfItems=6):
    """ Take a list of catalog brains and return only the first
    noOfItems which are either latest versions or not versioned.

    :param brains: iterable of catalog brains to filter
    :param noOfItems: maximum number of brains to return
    :return: list of brains, unique by URL, re-sorted by effective date
        (newest first)
    """
    cat = getToolByName(self.context, 'portal_catalog')
    res = []
    res_urls_set = set()  # URLs already accepted, for de-duplication
    for brain in brains:
        # if object implements our versioning
        if 'eea.versions.interfaces.IVersionEnhanced' in brain.object_provides:
            obj = brain.getObject()
            versionsObj = IGetVersions(obj)
            brain_url = brain.getURL()
            try:
                is_latest = versionsObj.isLatest()
            except Exception:
                logger.warning("Couldn't check if object at %s is latest obj",
                               brain_url)
                continue
            if is_latest:
                # keep it, this is the latest object; only add if its URL
                # was not already collected
                if brain_url not in res_urls_set:
                    res_urls_set.add(brain_url)
                    res.append(brain)
            else:
                # this brain is an older version: substitute the brain of
                # the latest version of the same object
                latest = versionsObj.latest_version()
                uid = latest.UID()
                results = cat.searchResults(UID=uid)
                if not results:
                    logger.warning("Couldn't find catalog entry for UID %s",
                                   uid)
                else:
                    # BUGFIX(perf): reuse the already-fetched results
                    # instead of issuing the identical catalog query again
                    brain = results[0]
                    brain_url = brain.getURL()
                    if brain_url not in res_urls_set:
                        res_urls_set.add(brain_url)
                        res.append(brain)
        else:
            #this object is not versioned, so keep it
            brain_url = brain.getURL()
            if brain_url not in res_urls_set:
                res_urls_set.add(brain_url)
                res.append(brain)
        if len(res) == noOfItems:
            break  #we got enough items
    # because of performance optimization ticket and #14008
    # resort based on effective date since getting the latest version could
    # mess up the sorting that came from the catalog search
    res.sort(key=lambda x: x.effective)
    res.reverse()
    return res
def __call__(self):
    """Create a version of the context when its parent Specification is
    the newest one; otherwise answer with the URL to use instead.
    """
    parent_spec = aq_parent(aq_inner(self.context))
    newest = IGetVersions(parent_spec).latest_version()
    if parent_spec.UID() != newest.UID():
        # not the newest spec: tell the caller where to go
        return "SEEURL: %s/@@createVersion" % self.context.absolute_url()
    create_version(self.context)
    return "OK"
def find_not_archived_versions(self, p_types=None):
    """ Find not archived older versions and return a detailed report

    An old version should have been archived (expired) once a later
    version is published; this reports old versions that were not.

    :param p_types: optional portal types to restrict the search;
        falsy value (None/empty) searches all versionable content
    :return: list of URLs of not-expired old versions
    """
    info('Start searching for not archived older version!')
    versionable_interface = 'eea.versions.interfaces.IVersionEnhanced'
    cat = getToolByName(self, 'portal_catalog')
    result = []
    # BUGFIX: default was a mutable list literal (p_types=[]); use None
    if p_types:
        brains = cat.unrestrictedSearchResults(
            object_provides=versionable_interface, portal_type=p_types)
    else:
        brains = cat.unrestrictedSearchResults(
            object_provides=versionable_interface)
    info('Checking %s brains.', len(brains))
    count = 0
    count_not_expired = 0
    total = len(brains)
    for brain in brains:
        count += 1
        if (count % 500) == 0:
            info('PROCESSING: %s/%s', count, total)
        # expired, skip
        if brain.ExpirationDate != 'None':
            continue
        obj = brain.getObject()
        adapter = IGetVersions(obj)
        # latest version, skip
        if adapter.isLatest():
            continue
        # skip unless some later version is published; only then should
        # this older version have been expired/archived
        later_versions = adapter.later_versions()
        if not any(v['review_state'] == 'published' for v in later_versions):
            continue
        info('Old version not expired: %s', brain.getURL())
        result.append(brain.getURL())
        count_not_expired += 1
    info('Found %s not expired old versions', count_not_expired)
    info('Done searching for not archived older version!')
    return result
def getVisualizations(self, title):
    """ Get Daviz Visualizations for sparql object """
    ob = None
    for sparql in self.context.values():
        if sparql.title != title:
            continue
        # found the sparql by title: work with its latest version
        ob = IGetVersions(sparql).latest_version()
        break
    return ob.getBRefs('relatesTo') if ob else []
def run(self, resource, *args, **kwds):
    """change the rdf resource """
    versions_api = IGetVersions(self.context)
    # newer versions replace this one; older ones are replaced by it
    resource.dcterms_isReplacedBy = [
        rdflib.URIRef(entry['url'])
        for entry in versions_api.later_versions()
    ]
    resource.dcterms_replaces = [
        rdflib.URIRef(entry['url'])
        for entry in versions_api.earlier_versions()
    ]
    resource.save()
def archive_previous_versions(context, skip_already_archived=True,
                              same_archive_date=False, also_children=False,
                              **kwargs):
    """ Archive previous versions of given object

    :param object context: object
    :param bool skip_already_archived: boolean indicating whether it should
        skip archiving the previous version that is already archived
    :param bool same_archive_date: boolean indicating whether the object
        being archived should receive the same archiving date as the context
    :param bool also_children: boolean indicating whether the children of
        the versions should also be archived
    :param dict kwargs: options that are passed to the archive method
        directly affecting it's results if they are passed
    :rtype list
    """
    versions_adapter = IGetVersions(context)
    archivator_adapter = queryAdapter(context, IObjectArchivator)
    options = kwargs
    if not options:
        custom_message = getattr(archivator_adapter, 'custom_message', '')
        reason = getattr(archivator_adapter, 'reason', 'content_is_outdated')
        initiator = getattr(archivator_adapter, 'initiator', None)
        options = {
            'custom_message': custom_message,
            'initiator': initiator,
            'reason': reason
        }
    # BUGFIX: supply a default -- queryAdapter may return None and the
    # adapter may not define 'archive_date'; the bare getattr raised
    # AttributeError in both cases
    if same_archive_date and getattr(archivator_adapter, 'archive_date', None):
        options.update({'archive_date': archivator_adapter.archive_date})
    versions = versions_adapter.versions()
    previous_versions = []
    uid = context.UID()
    # everything before the context in the version chain is "previous"
    for version in versions:
        if version.UID() == uid:
            break
        previous_versions.append(version)
    affected_objects = []
    for obj in previous_versions:
        if skip_already_archived:
            if IObjectArchived.providedBy(obj):
                continue
        if also_children:
            affected_objects.extend(archive_obj_and_children(obj, **options))
        else:
            storage = queryAdapter(obj, IObjectArchivator)
            storage.archive(obj, **options)
            affected_objects.append(obj)
    return affected_objects
def spec_assign_version(context, new_version): """Assign a specific version id to an object We override the same method from eea.versions. We want to be able to reassign version for children Assessments to be at the same version as the children Assessments of the target Specification version. Also, we want to assign the new version to all specification that had the old version. """ #assign new version to context base_assign_version(context, new_version) #search for specifications with the old version and assign new version other_assessments = [] #optimization: children assessments from other specs versions = [o for o in IGetVersions(context).versions() if o.meta_type == "Specification"] for o in versions: IVersionControl(o).setVersionId(new_version) o.reindexObject() other_assessments.extend(o.objectValues("Assessment")) #reassign version ids to context assessments + assessments #from related specifications vid = get_assessment_vid_for_spec_vid(context, new_version) for asmt in (list(context.objectValues('Assessment')) + list(other_assessments)): IVersionControl(asmt).setVersionId(vid) asmt.reindexObject()
def version_id(self):
    """ get version id of context """
    try:
        vid = IGetVersions(self.context).versionId
    except TypeError:
        # context cannot be adapted (not versionable)
        return None
    return vid
def archive_previous_versions(context, skip_already_archived=True,
                              same_archive_date=False, also_children=False,
                              **kwargs):
    """ Archive previous versions of given object

    :param object context: object
    :param bool skip_already_archived: boolean indicating whether it should
        skip archiving the previous version that is already archived
    :param bool same_archive_date: boolean indicating whether the object
        being archived should receive the same archiving date as the context
    :param bool also_children: boolean indicating whether the children of
        the versions should also be archived
    :param dict kwargs: options that are passed to the archive method
        directly affecting it's results if they are passed
    :rtype list
    """
    versions_adapter = IGetVersions(context)
    archivator_adapter = queryAdapter(context, IObjectArchivator)
    options = kwargs
    if not options:
        custom_message = getattr(archivator_adapter, 'custom_message', '')
        reason = getattr(archivator_adapter, 'reason', 'content_is_outdated')
        initiator = getattr(archivator_adapter, 'initiator', None)
        options = {'custom_message': custom_message,
                   'initiator': initiator,
                   'reason': reason}
    # BUGFIX: supply a default -- queryAdapter may return None and the
    # adapter may not define 'archive_date'; the bare getattr raised
    # AttributeError in both cases
    if same_archive_date and getattr(archivator_adapter, 'archive_date', None):
        options.update({'archive_date': archivator_adapter.archive_date})
    versions = versions_adapter.versions()
    previous_versions = []
    uid = context.UID()
    # everything before the context in the version chain is "previous"
    for version in versions:
        if version.UID() == uid:
            break
        previous_versions.append(version)
    affected_objects = []
    for obj in previous_versions:
        if skip_already_archived:
            if IObjectArchived.providedBy(obj):
                continue
        if also_children:
            affected_objects.extend(archive_obj_and_children(obj, **options))
        else:
            storage = queryAdapter(obj, IObjectArchivator)
            storage.archive(obj, **options)
            affected_objects.append(obj)
    return affected_objects
def __call__(self):
    """Return the @@esms.xml links of the latest versions of all
    published Assessments, one per line, sorted and de-duplicated.
    """
    catalog = self.context.portal_catalog
    brains = catalog.searchResults(portal_type="Assessment",
                                   review_state="published")
    unique_urls = set()
    for brain in brains:
        newest = IGetVersions(brain.getObject()).latest_version()
        unique_urls.add(newest.absolute_url())
    return "\n".join("%s/@@esms.xml" % url for url in sorted(unique_urls))
def run(self, resource, *args, **kwds):
    """ change the rdf output """
    version_id = IGetVersions(self.context).versionId
    # fall back to the UID when there is no version id
    resource.schema_productID = version_id or self.context.UID()
    resource.save()
def get_datasets(self):
    """ Return datasets sorted by legislation

    Loads the 'all-datasets/daviz.json' view, groups dataset entries by
    their legislation (label, url, title) triple, rewrites each dataset
    URL to its latest version, and de-duplicates per legislation.

    :return: dict mapping (label, url, title) -> list of dataset dicts
    """
    view = self.context.restrictedTraverse('all-datasets/daviz.json')()
    values = json.loads(view)
    datasets = values['items']
    results = {}
    legislation_titles = {}  # label -> dataset URLs already kept
    for data in datasets:
        legislation_label = data['instrument_label']
        legislation_url = data['instrument']
        legislation_title = data['instrument_title']
        key = (legislation_label, legislation_url, legislation_title)
        if key not in results:
            results[key] = []
            legislation_titles[legislation_label] = []
        # get dataset latest version only
        data_url = data['dataset']
        # NOTE(review): assumes every dataset URL starts with the
        # hard-coded site prefix -- IndexError otherwise; TODO confirm
        data_url_short = str(data_url.split('http://www.eea.europa.eu/')[1])
        data_obj = self.context.unrestrictedTraverse(data_url_short, None)
        if data_obj:
            api = IGetVersions(data_obj)
            latest_version = api.latest_version()
            latest_version_url = latest_version.absolute_url()
            data_url = latest_version_url
            data['dataset'] = latest_version_url
            date = latest_version.getEffectiveDate() or \
                latest_version.creation_date
            if not date:
                # fall back to the 'lastUpload' field when no dates are set
                field = latest_version.getField('lastUpload')
                if field:
                    date = field.getAccessor(latest_version)()
            data['publishing_date'] = date
        else:
            # dataset not traversable on this site: drop the entry
            continue
        # avoid dataset duplicated since query returns same datasets with
        # several rod objects
        if not data_url in legislation_titles[legislation_label]:
            results[key].append(data)
            legislation_titles[legislation_label].append(data_url)
    return results
def findQuery(self, title):
    """Find the Query in the bookmarks folder """
    for sparql in self.values():
        if sparql.title == title:
            # return the latest version of the first title match
            return IGetVersions(sparql).latest_version()
    return None
def __call__(self, count=5, ver_num=3):
    """Return up to ``count`` latest versions of published EEAFigures
    that have more than ``ver_num`` versions, newest-modified first.

    :param count: maximum number of figures returned
    :param ver_num: minimum (exclusive) number of versions required
    :return: list of figure objects (latest versions), unique
    """
    res = []
    cat = getToolByName(self.context, 'portal_catalog')
    brains = cat.searchResults({'portal_type': ['EEAFigure'],
                                'sort_on': 'modified',
                                'sort_order': 'reverse',
                                'review_state': 'published'})
    for brain in brains:
        figure = brain.getObject()
        api = IGetVersions(figure)
        versions = api.versions()
        if len(versions) > ver_num:
            latest_version = api.latest_version()
            # idiom fix: 'x not in y' instead of 'not x in y'
            if latest_version not in res:
                res.append(latest_version)
                if len(res) == count:
                    break
    return res
def hasWrongVersionId(context):
    """ Determines if the assessment belongs to a wrong version group

    Collects all same-language Assessments from every version of the
    parent Specification, plus IndicatorFactSheets matching the
    context's codes; if they do not all share one versionId, the group
    is considered wrong.

    :return: True when more than one (or zero) versionId is found
    """
    cat = getToolByName(context, 'portal_catalog')
    # parent based checks; this also does codes check because
    # assessments inherit codes from their parent specification
    spec = aq_parent(aq_inner(context))
    spec_versions = IGetVersions(spec).versions()
    # idiom fix: 'x not in y' instead of 'not x in y'
    if spec not in spec_versions:
        spec_versions.append(spec)
    all_assessments = []
    context_lang = context.getLanguage()
    for spec in spec_versions:
        for assessment in spec.objectValues("Assessment"):
            if assessment.getLanguage() == context_lang:
                all_assessments.append(assessment)
    # now also checking IndicatorFactSheets, using codes to do matching
    codes = ["%s%s" % (c['set'], c['code']) for c in context.getCodes()]
    factsheets = []
    for code in codes:
        factsheets.extend([
            b.getObject() for b in cat.searchResults(
                get_codes=code, portal_type="IndicatorFactSheet")
        ])
    # group everything by versionId; a healthy group has exactly one id
    version_ids = {}
    for a in all_assessments + factsheets:
        vid = IGetVersions(a).versionId
        version_ids.setdefault(vid, []).append(a)
    return len(version_ids) != 1
def addOrUpdateQuery(self, title, endpoint, query):
    """Update an already existing query

    Create new version

    Runs as the system user. If no Sparql with the given title exists, a
    new one is created; if one exists and its query text changed, a new
    version is created and edited.

    :return: the created/updated Sparql object (latest version)
    """
    # temporarily elevate to the system user for creation/versioning
    oldSecurityManager = getSecurityManager()
    newSecurityManager(None, SpecialUsers.system)
    ob = None
    changed = True
    for sparql in self.values():
        if sparql.title == title:
            x1 = IGetVersions(sparql)
            latest_sparql = x1.latest_version()
            ob = latest_sparql
            if latest_sparql.query_with_comments == query:
                # identical query text: nothing to update
                changed = False
            break
    if not ob:
        # no existing query with this title: create a fresh Sparql
        _id = generateUniqueId("Sparql")
        _id = self.invokeFactory(type_name="Sparql", id=_id)
        ob = self[_id]
        ob.edit(
            title = title,
            endpoint_url = endpoint,
            sparql_query = query,
        )
        ob._renameAfterCreation(check_auto_id=True)
        ob.invalidateWorkingResult()
    else:
        if changed:
            # query text changed: version the object, then update it
            ob = versions.create_version(ob)
            ob.edit(
                sparql_query = query,
            )
            ob.invalidateWorkingResult()
    setSecurityManager(oldSecurityManager)
    return ob
def get_diff_vers_setcode(self):
    """Returns a list of versions of this Spec that have a different
    main setcode
    """
    own_codes = self.getCodes()
    if not own_codes:
        return []
    main_code = own_codes[0]
    return [version
            for version in IGetVersions(self).versions()
            if version.getCodes() and version.getCodes()[0] != main_code]
def addOrUpdateQuery(self, title, endpoint, query):
    """Update an already existing query

    Create new version

    Runs as the system user. If no Sparql with the given title exists, a
    new one is created; if one exists and its query text changed, a new
    version is created and edited.

    :return: the created/updated Sparql object (latest version)
    """
    # temporarily elevate to the system user for creation/versioning
    oldSecurityManager = getSecurityManager()
    newSecurityManager(None, SpecialUsers.system)
    ob = None
    changed = True
    for sparql in self.values():
        if sparql.title == title:
            x1 = IGetVersions(sparql)
            latest_sparql = x1.latest_version()
            ob = latest_sparql
            if latest_sparql.query_with_comments == query:
                # identical query text: nothing to update
                changed = False
            break
    if not ob:
        # no existing query with this title: create a fresh Sparql
        _id = generateUniqueId("Sparql")
        _id = self.invokeFactory(type_name="Sparql", id=_id)
        ob = self[_id]
        ob.edit(
            title=title,
            endpoint_url=endpoint,
            sparql_query=query,
        )
        ob._renameAfterCreation(check_auto_id=True)
        ob.invalidateWorkingResult()
    else:
        if changed:
            # query text changed: version the object, then update it
            ob = versions.create_version(ob)
            ob.edit(sparql_query=query, )
            ob.invalidateWorkingResult()
    setSecurityManager(oldSecurityManager)
    return ob
def __call__(self):
    """Content-rule executor: archive or unarchive the event's object.

    Depending on ``element.affectPreviousVersion`` the action targets
    the previous version of the object instead; with
    ``element.applyRecursively`` the action cascades via
    ``recursive_action``. Always returns True (rule handled).
    """
    action = self.element.action
    obj = self.event.object
    orig_obj_url = obj.absolute_url(1)
    versions = IGetVersions(obj).versions()
    if action == "archived":
        rec_action = 'archive'
        message = 'New version %s was published' % orig_obj_url
    else:
        rec_action = 'unarchive'
        message = ('Unarchived by content rule because latest version %s '
                   'was unpublished') % orig_obj_url
    # BUGFIX(cleanup): the original had two identical if/elif branches
    # (with and without applyRecursively) -- collapsed into one; also
    # dropped the dead 'adapter'/'val'/'rec_action' pre-initializations
    if self.element.affectPreviousVersion:
        if len(versions) < 2:
            # no previous version: no action to be taken
            return True
        obj = versions[-2]
    val = dict(initiator='contentRules', reason='Other',
               custom_message=message)
    if self.element.applyRecursively:
        self.recursive_action(obj, rec_action, val)
    else:
        adapter = IObjectArchivator(obj)
        if action == "archived":
            adapter.archive(obj, **val)
        else:
            adapter.unarchive(obj, **val)
    logger.info("Object %s state is %s", obj.absolute_url(), action)
    return True
def checkQuery(self, title, endpoint, query):
    """Check if a query already exists

    0 - missing
    1 - exists
    2 - exists but changed
    """
    wanted_title = title.encode('utf8')
    for sparql in self.values():
        if sparql.title != wanted_title:
            continue
        latest_sparql = IGetVersions(sparql).latest_version()
        if latest_sparql.query_with_comments == query:
            return 1  # exists, unchanged
        return 2  # exists, but the query text differs
    return 0  # no query with this title
def get_duplicated_codes(self):
    """Returns codes that are duplicated by some other indicator

    :return: list of (code, brains) tuples, where brains are one
        representative Specification brain per versionId group that
        shares the code but is not a version of this object
    """
    versions = [v.UID() for v in IGetVersions(self).versions()]
    search = getToolByName(self, 'portal_catalog').searchResults
    codes = self.getCodes()
    self_UID = self.UID()
    #We want to see if there are other specs with the same code
    #that are not versions of this object.
    #if any version has the same UID as the checked object,
    #then we consider all versions to be the same as the object
    duplicated_codes = []
    for code in codes:
        code = code['set'] + code['code']
        brains = search(portal_type="Specification", get_codes=[code])
        #brains += cat(portal_type="IndicatorFactSheet", get_codes=[code])
        not_same = [
            b for b in brains
            if (b.UID not in versions) and (b.UID != self_UID)
        ]
        # now we filter the specification based on their versionId;
        # we don't want to report all specifications in the versionId group
        _d = {}  # versionId -> last-seen brain of that group
        for b in not_same:
            if b.getVersionId == MissingValue:
                #this is infrequent
                logger.warn("Missing versionid value: %s", b.getObject())
                continue
            _d[b.getVersionId.strip()] = b
        if _d:
            duplicated_codes.append((code, _d.values()))
    return duplicated_codes
def get_assessment_vid_for_spec_vid(context, versionid):
    """Returns an assessment version id

    Given a version id for a specification, returns the version id of
    the assessments that are contained in any of the Specifications from
    that versioning group. Falls back to a fresh random id when no such
    assessment exists.
    """
    # default answer, used when no sibling assessment is found
    vid = _random_id(context, 10)
    cat = getToolByName(context, 'portal_catalog')
    own_path = '/'.join(context.getPhysicalPath())
    spec_brains = cat.searchResults({'getVersionId': versionid,
                                     'portal_type': 'Specification'})
    for spec_brain in spec_brains:
        if spec_brain.getPath() == own_path:
            continue  # skip the context itself
        children = spec_brain.getObject().objectValues('Assessment')
        if children:
            vid = IGetVersions(children[0]).versionId
            break
    return vid
def get_version_for(self, obj):
    """Return the version id of the given object."""
    adapter = IGetVersions(obj)
    return adapter.versionId
def factory_Assessment(self):
    """Factory: create a new Assessment inside this Specification.

    Order of operation:
      1. optionally delegate to the latest Specification version
         (``create_in_latest_spec`` form flag);
      2. refuse when no PolicyQuestion exists yet;
      3. if an Assessment already exists, version the newest one;
      4. otherwise create one from scratch, reusing the versionId of a
         sibling spec's Assessment or a matching IndicatorFactSheet when
         available, and add one AssessmentPart per PolicyQuestion.

    :return: dict with 'obj', 'subview' and 'direct_edit' keys
    """
    type_name = 'Assessment'
    create = self.REQUEST.form.get('create_in_latest_spec')
    if create == 'true':
        latest = IGetVersions(self).latest_version()
        if latest.UID() != self.UID():
            # delegate to the newest spec's factory
            return latest.factory_Assessment()
    #drop with error if no PolicyQuestions are created
    if not self.objectValues('PolicyQuestion'):
        raise ValueError("You need to create first a Policy Question")
    #create a version if we already have an Assessment
    assessments = self.objectValues(type_name)
    if assessments:
        #NOTE: we assume the latest object is the last one
        original = assessments[-1]
        ast = createVersion(original)
        return {
            'obj': ast,
            'subview': '@@edit_aggregated',
            'direct_edit': True
        }
    #we want to make this assessment a version of a previous assessment
    #if this Specification is already versioned, so we try get a versionId
    version_id = None
    spec_versions = IGetVersions(self).versions()
    for spec in spec_versions:
        asts = spec.objectValues("Assessment")
        if asts:
            original = asts[0]
            version_id = IVersionControl(original).versionId
            break
    #if there are no other assessments in this version set we look for
    #other IndicatorFactSheet objects with same indicator code to
    #get the versionId
    if not version_id:
        brains = []
        codes = self.get_codes()
        cat = getToolByName(self, 'portal_catalog')
        # NOTE(review): codes[1::2] takes every other entry; presumably
        # codes alternate set/code values -- TODO confirm
        for code in codes[1::2]:
            brains = cat.searchResults({
                'portal_type': 'IndicatorFactSheet',
                'get_codes': code
            })
            if brains:
                break
        if brains:
            version_id = IVersionControl(brains[0].getObject()).versionId
    #create a new Assessment from scratch
    #id = self.generateUniqueId(type_name)
    aid = make_id('assessment', self.objectIds())
    new_id = self.invokeFactory(type_name=type_name,
                                id=aid,
                                base_impl=True,
                                title=self.translate(
                                    msgid='label-newly-created-type',
                                    domain='indicators',
                                    default="Newly created ${type_name}",
                                    mapping={'type_name': type_name},
                                ))
    ast = self[new_id]
    if version_id:
        IVersionControl(ast).setVersionId(version_id)
    #create assessment parts for each policy question
    for pq in self.objectValues("PolicyQuestion"):
        aid = ast.invokeFactory(
            type_name="AssessmentPart",
            id=ast.generateUniqueId("AssessmentPart"),
        )
        ap = ast[aid]
        ap.setRelatedItems(pq)
        try:
            ap.reindexObject()
        except AttributeError:
            log("#ZZZ: this happens when executed from test")
    ast.reindexObject()
    notify(ObjectInitializedEvent(ast))
    return {
        'obj': ast,
        'subview': '@@edit_aggregated',
        'direct_edit': True
    }
def __call__(self):
    """Content-rule action: ping the CR/SDS service with @@rdf URLs.

    Queues async ping jobs for the event's object, its versions,
    translations, back-relations, children and container, depending on
    the event type (added / removed / moved-renamed).
    """
    event = self.event
    service_to_ping = self.element.service_to_ping
    obj = self.event.object
    container = obj.getParentNode()
    noasync_msg = 'No instance for async operations was defined.'

    def pingCRSDS(service_to_ping, obj_url, create):
        """ Ping the CR/SDS service """
        # NOTE: async_service is bound later in the enclosing scope,
        # before any of these closures are called
        if async_service is None:
            logger.warn("Can't pingCRSDS, plone.app.async not installed!")
            return
        options = {}
        options['service_to_ping'] = service_to_ping
        options['obj_url'] = self.sanitize_url(obj_url)
        options['create'] = create
        queue = async_service.getQueues()['']
        try:
            async_service.queueJobInQueue(queue, ('rdf', ), ping_CRSDS,
                                          self.context, options)
        except ComponentLookupError:
            logger.info(noasync_msg)

    def pingCRSDS_backrel(service_to_ping, obj, create):
        """ Ping backward relations """
        back_relations = obj.getBRefs('relatesTo')
        for rel in back_relations:
            if rel is not None:
                obj_url = "%s/@@rdf" % rel.absolute_url()
                pingCRSDS(service_to_ping, obj_url, create)

    def pingCRSDS_children(service_to_ping, obj, create):
        """ Ping all sub-objects (recursively) """
        if obj.portal_type == "Discussion Item":
            # 22047 skip object if it's of type Discussion Item
            return
        for child in obj.objectIds():
            child_obj = obj.get(child)
            if not child_obj:
                logger.info("Couldn't retrieve child id %s for %s", child,
                            obj.absolute_url())
                continue
            obj_url = "%s/@@rdf" % child_obj.absolute_url()
            pingCRSDS(service_to_ping, obj_url, create)
            pingCRSDS_children(service_to_ping, child_obj, create)

    # When no request the task is called from a async task, see #19830
    request = getattr(obj, 'REQUEST', None)
    # Detect special object used to force acquisition, see #18904
    if isinstance(request, str):
        request = None
    create = IObjectAddedEvent.providedBy(event)
    if service_to_ping == "":
        # nothing configured: nothing to do
        return
    if hasVersionsInstalled and IVersionEnhanced.providedBy(obj) \
            and request:
        obj_versions = IGetVersions(obj).versions()
    else:
        obj_versions = [obj]
    async_service = queryUtility(IAsyncService)
    # If object has translations
    if hasLinguaPloneInstalled and ITranslatable.providedBy(obj):
        if obj.isCanonical():
            # Ping all translations
            for trans in obj.getTranslations().items():
                if trans[0] != 'en':
                    trans_obj = trans[1][0]
                    obj_url = trans_obj.absolute_url()
                    pingCRSDS(service_to_ping, obj_url, create)
        else:
            # Ping only canonical
            can_obj = obj.getCanonical()
            obj_url = can_obj.absolute_url()
            pingCRSDS(service_to_ping, obj_url, create)
    # If object was deleted
    if IObjectRemovedEvent.providedBy(event):
        # Ping backward relations
        pingCRSDS_backrel(service_to_ping, obj, create)
        # Ping all sub-objects
        pingCRSDS_children(service_to_ping, obj, create)
    # If object was moved/renamed first ping with the old object's URL
    if IObjectMovedOrRenamedEvent.providedBy(event):
        obj_url = "%s/%s/@@rdf" % (event.oldParent.absolute_url(),
                                   event.oldName)
        pingCRSDS(service_to_ping, obj_url, False)
        # then ping with the container of the old object
        obj_url = "%s/@@rdf" % event.oldParent.absolute_url()
        pingCRSDS(service_to_ping, obj_url, False)
        # Ping backward relations
        pingCRSDS_backrel(service_to_ping, obj, create)
        # Ping all sub-objects
        pingCRSDS_children(service_to_ping, obj, create)
    # Ping each version
    for obj in obj_versions:
        obj_url = "%s/@@rdf" % obj.absolute_url()
        pingCRSDS(service_to_ping, obj_url, create)
    # If no Aquisition there is no container, see #18904
    if container:
        obj_url = "%s/@@rdf" % container.absolute_url()
        pingCRSDS(service_to_ping, obj_url, False)
    return True
def has_newer_version(self):
    """Tell whether any later version of this object exists."""
    later = IGetVersions(self).later_versions()
    return bool(later)
def factory_Assessment(self):
    """Factory: create a new Assessment inside this Specification.

    Order of operation:
      1. optionally delegate to the latest Specification version
         (``create_in_latest_spec`` form flag);
      2. refuse when no PolicyQuestion exists yet;
      3. if an Assessment already exists, version the newest one;
      4. otherwise create one from scratch, reusing the versionId of a
         sibling spec's Assessment or a matching IndicatorFactSheet when
         available, and add one AssessmentPart per PolicyQuestion.

    :return: dict with 'obj', 'subview' and 'direct_edit' keys
    """
    type_name = 'Assessment'
    create = self.REQUEST.form.get('create_in_latest_spec')
    if create == 'true':
        latest = IGetVersions(self).latest_version()
        if latest.UID() != self.UID():
            # delegate to the newest spec's factory
            return latest.factory_Assessment()
    #drop with error if no PolicyQuestions are created
    if not self.objectValues('PolicyQuestion'):
        raise ValueError("You need to create first a Policy Question")
    #create a version if we already have an Assessment
    assessments = self.objectValues(type_name)
    if assessments:
        #NOTE: we assume the latest object is the last one
        original = assessments[-1]
        ast = createVersion(original)
        return {'obj':ast, 'subview':'@@edit_aggregated',
                'direct_edit':True}
    #we want to make this assessment a version of a previous assessment
    #if this Specification is already versioned, so we try get a versionId
    version_id = None
    spec_versions = IGetVersions(self).versions()
    for spec in spec_versions:
        asts = spec.objectValues("Assessment")
        if asts:
            original = asts[0]
            version_id = IVersionControl(original).versionId
            break
    #if there are no other assessments in this version set we look for
    #other IndicatorFactSheet objects with same indicator code to
    #get the versionId
    if not version_id:
        brains = []
        codes = self.get_codes()
        cat = getToolByName(self, 'portal_catalog')
        # NOTE(review): codes[1::2] takes every other entry; presumably
        # codes alternate set/code values -- TODO confirm
        for code in codes[1::2]:
            brains = cat.searchResults({
                'portal_type': 'IndicatorFactSheet',
                'get_codes': code})
            if brains:
                break
        if brains:
            version_id = IVersionControl(brains[0].getObject()).versionId
    #create a new Assessment from scratch
    #id = self.generateUniqueId(type_name)
    aid = make_id('assessment', self.objectIds())
    new_id = self.invokeFactory(type_name=type_name,
                                id=aid,
                                base_impl=True,
                                title=self.translate(
                                    msgid='label-newly-created-type',
                                    domain='indicators',
                                    default="Newly created ${type_name}",
                                    mapping={'type_name':type_name},
                                ))
    ast = self[new_id]
    if version_id:
        IVersionControl(ast).setVersionId(version_id)
    #create assessment parts for each policy question
    for pq in self.objectValues("PolicyQuestion"):
        aid = ast.invokeFactory(type_name="AssessmentPart",
                                id=ast.generateUniqueId("AssessmentPart"),)
        ap = ast[aid]
        ap.setRelatedItems(pq)
        try:
            ap.reindexObject()
        except AttributeError:
            log("#ZZZ: this happens when executed from test")
    ast.reindexObject()
    notify(ObjectInitializedEvent(ast))
    return {'obj':ast, 'subview':'@@edit_aggregated', 'direct_edit':True}
def migrate_version(brains, vobj, count, **kwargs):
    """ migrate_versions given brains and prefix

    Rewrites versionIds of all versions of the objects behind ``brains``
    to "<prefix>-<count>" (optionally suffixed with the language code),
    handling translations as a group. Commits every 50 counts.

    :param brains: catalog brains of the objects to migrate
    :param vobj: config object supplying ``title`` (prefix) and
        ``prefix_with_language``
    :param count: running counter used to build unique ids
    :param kwargs: ``datasets`` flag enables permalink-mapping bookkeeping
    :return: the updated counter
    """
    increment = True
    no_versions = []  # URLs of objects that could not be adapted
    prefix = str(vobj.title)
    parent = None
    datasets = kwargs.get('datasets')
    if datasets:
        # ensure the folder holding permalink mappings exists
        site = getSite()
        parent = site.get('eea_permalink_objects')
        if not parent:
            parent_id = site.invokeFactory('Folder', 'eea_permalink_objects')
            parent = site[parent_id]
    for brain in brains:
        obj = brain.getObject()
        if not obj:
            continue
        adapter = queryAdapter(obj, IGetVersions)
        if not adapter:
            no_versions.append(obj.absolute_url())
            continue
        versions = adapter.versions()
        latest_version = versions[-1]
        for obj in versions:
            verparent = IVersionControl(obj)
            verparent_id = verparent.versionId
            if prefix not in verparent_id:
                # not migrated yet: build the new id from prefix + counter
                version_id = "{0}-{1}".format(prefix, count)
                orig_id = version_id
                if vobj.prefix_with_language:
                    version_id = version_id + '-' + obj.getLanguage()
                if getattr(obj, 'getTranslations', None):
                    translations = obj.getTranslations()
                    if len(translations) > 1:
                        # translated object: re-id canonical and every
                        # translation with its own language suffix
                        canonical = obj.getCanonical()
                        if vobj.prefix_with_language:
                            version_id = orig_id + '-' + \
                                canonical.getLanguage()
                        IVersionControl(canonical).setVersionId(version_id)
                        canonical.reindexObject(idxs=['getVersionId'])
                        for trans_tuple in translations.items():
                            translation = trans_tuple[1][0]
                            if translation != canonical:
                                version_id = orig_id + '-' + trans_tuple[0]
                                IVersionControl(translation).setVersionId(
                                    version_id)
                                translation.reindexObject(
                                    idxs=['getVersionId'])
                    else:
                        if datasets and obj is latest_version:
                            # record a permalink mapping from the old id
                            # to the new one for the latest version
                            vid = IGetVersions(obj).versionId
                            zmi_addPermalinkMapping(parent, vid, version_id)
                            verparent.setVersionId(version_id)
                            obj.reindexObject(idxs=['getVersionId'])
                        else:
                            verparent.setVersionId(version_id)
                            obj.reindexObject(idxs=['getVersionId'])
                # NOTE(review): objects without getTranslations fall
                # through without setVersionId being called -- TODO confirm
                # this is intended
                increment = True
                logger.info('%s ==> %s --> %s', obj.absolute_url(1),
                            verparent_id, version_id)
            else:
                increment = False
        if increment:
            count += 1
        if count % 50 == 0:
            transaction.commit()
    logger.info("MIGRATION DONE")
    return count
def __call__(self):
    """Render this indicator assessment as an SDMX-ML GenericMetadata
    (full ESMS report) XML document, returned as a UTF-8 string.

    The ?keepHTML=true request parameter keeps rich-text values as HTML
    wrapped in CDATA; otherwise HTML is stripped to plain text.
    """
    self.request.response.setHeader('Content-Type', 'text/xml')
    now = datetime.datetime.now()
    # the reporting period covers the current calendar year
    year_start = datetime.datetime(year=now.year, month=1, day=1)
    year_end = datetime.datetime(year=now.year, month=12, day=31)
    #maybe it should be done as timedelta of 1sec from previous year

    def getTextKeepHTML(value):
        """ Retrieve text content from html input while keeping the html """
        value = escapeSpecialChars(value)
        return _toUnicode("<![CDATA[{0}]]>".format((value)))

    def getTextStripHTML(value):
        """ Retrieve text content from html input removing the html """
        value = escapeSpecialChars(value)
        return BeautifulSoup(value, 'lxml').get_text()

    # pick the HTML strategy from the request flag (default: strip)
    getText = getTextKeepHTML if self.request.get(
        "keepHTML", "false") == "true" else getTextStripHTML

    #we extract some info here to simplify code down below
    spec = self.context.aq_parent  # parent Specification of this assessment
    effective = self.context.getEffectiveDate()
    if effective:
        publish_date = effective.asdatetime().date().strftime("%d/%m/%Y")
    else:
        publish_date = ""
    spec_modified = spec.modified().asdatetime().date().strftime(
        "%d/%m/%Y")
    latest_version = IGetVersions(self.context).latest_version()
    ref_area = u", ".join(
        [c.decode('utf-8') for c in self.context.getLocation()])
    manager_id = spec.getManager_user_id()
    mtool = getToolByName(spec, 'portal_membership')
    # fall back to 'Missing' when the manager has no member info
    manager_name = (mtool.getMemberInfo(manager_id) or
                    {}).get('fullname', 'Missing')
    manager_name = manager_name.decode('utf-8')
    dpsir_vocab = NamedVocabulary('indicator_dpsir').getVocabularyDict(
        spec)
    typology_vocab = NamedVocabulary(
        'indicator_typology').getVocabularyDict(spec)
    dpsir = dpsir_vocab.get(spec.getDpsir())
    typology = typology_vocab.get(spec.getTypology())
    dpsir_typology = "DPSIR: %s - Typology: %s" % (dpsir,
                                                   _toUnicode(typology))
    themes_vocab = dict(spec._getMergedThemes())
    themes = ", ".join([themes_vocab.get(l) for l in spec.getThemes()])
    #let's use the already well-formatted temporal coverage browser view
    temporal_coverage = getMultiAdapter(
        (self.context, self.request), name=u'formatTempCoverage')()
    units = getText(spec.getUnits()) or u'Not available'
    data_sets = [rel for rel in spec.getRelatedItems()
                 if rel.portal_type == 'Data']
    ext_data_sets = [rel for rel in spec.getRelatedItems()
                     if rel.portal_type == 'ExternalDataSpec']
    # plain-text listing of data sources: "<title> <url>" plus owner orgs
    out = ""
    for dataowner in data_sets:
        out += u" ".join(
            (_toUnicode(dataowner.Title()), dataowner.absolute_url()))
        out += u" "
        for provider_url in dataowner.getDataOwner():
            org = spec.getOrganisationName(provider_url)
            if org:
                # NOTE(review): org.Title is read as an attribute, not
                # called -- presumably already a string here; confirm
                out += u" ".join((_toUnicode(org.Title), provider_url))
                out += u" "
    for eds in ext_data_sets:
        out += u" ".join((_toUnicode(eds.Title()), eds.absolute_url()))
        out += u" "
        org = spec.getOrganisationName(eds.getProvider_url())
        if org:
            out += u" ".join(
                (_toUnicode(org.Title), eds.getProvider_url()))
            out += u" "
    mrefs = [b.getObject() for b in spec.getFolderContents(
        contentFilter={'portal_type': 'MethodologyReference'})]
    methodology_reference = getText("\n".join(
        [(o.Title() + "\n" + o.getDescription()) for o in mrefs]))
    uncertainties = getText('Methodology uncertainty: ' +
                            spec.getMethodology_uncertainty() +
                            '\nData uncertainty: ' +
                            spec.getData_uncertainty() +
                            '\nRationale uncertainty: ' +
                            spec.getRationale_uncertainty())
    questions = [b.getObject() for b in spec.getFolderContents(
        contentFilter={'portal_type': 'PolicyQuestion'})]
    # key policy question first, then the remaining (specific) ones
    qpart = ""
    if questions:
        main_q = None
        for q in questions:
            if q.getIs_key_question():
                main_q = q
        if main_q is not None:
            qpart += "Key policy question: %s\n" % main_q.Title()
        for q in questions:
            if q == main_q:
                continue
            qpart += "Specific policy question: %s\n" % q.Title()
    user_needs = getText('Justification for indicator selection: ' +
                         spec.getRationale_justification() + "\n" + qpart)
    methodology = getText(spec.getMethodology())
    methodology_gapfilling = getText(spec.getMethodology_gapfilling())
    indicator_definition = getText(spec.Title() + ". " +
                                   spec.getDefinition())
    frequency_of_updates = getText(spec.get_frequency_of_updates())

    #The xml construction
    E = ElementMaker(nsmap=NAMESPACES)
    header = E.Header(
        E.ID("DMBB_ESMSIPEEA_A"),
        E.Prepared(now.replace(microsecond=0).isoformat()),
        E.Sender(id="4D0"),
        E.DataSetID("DMBB_ESMSIPEEA_A_1353407791410"),
        E.DataSetAction("Append"),
        E.Extracted(now.replace(microsecond=0).isoformat()),
        E.ReportingBegin(year_start.replace(microsecond=0).isoformat()),
        E.ReportingEnd(year_end.replace(microsecond=0).isoformat()),
    )
    M = ElementMaker(namespace=NAMESPACES['GenericMetadata'],
                     nsmap=NAMESPACES)
    metadata = M.MetadataSet(
        M.MetadataStructureRef('ESMSIPEEA_MSD'),
        M.MetadataStructureAgencyRef("ESTAT"),
        M.ReportRef('ESMS_REPORT_FULL'),
        M.AttributeValueSet(
            M.TargetRef("FULL_ESMS"),
            M.TargetValues(
                M.ComponentValue("2013-A0", component="TIME_PERIOD",
                                 object="TimeDimension"),
                M.ComponentValue("4D0", component="DATA_PROVIDER",
                                 object="DataProvider"),
                M.ComponentValue("DMBB_ESMSIPEEA_A", component="DATAFLOW",
                                 object="DataFlow"),
            ),
            M.ReportedAttribute(  #CONTACT
                M.Value(),
                M.ReportedAttribute(
                    M.Value('European Environment Agency (EEA)'),
                    conceptID="CONTACT_ORGANISATION",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="ORGANISATION_UNIT",
                ),
                M.ReportedAttribute(
                    M.Value(manager_name),
                    conceptID="CONTACT_NAME",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_FUNC",
                ),
                M.ReportedAttribute(
                    M.Value('Kongens Nytorv 6, 1050, '
                            'Copenhagen K, Denmark'),
                    conceptID="CONTACT_MAIL",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_EMAIL",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_PHONE",
                ),
                M.ReportedAttribute(
                    M.Value('Not applicable'),
                    conceptID="CONTACT_FAX",
                ),
                conceptID="CONTACT",
            ),
            M.ReportedAttribute(  #META_UPDATE
                M.Value(),
                M.ReportedAttribute(
                    M.Value(publish_date),
                    conceptID="META_CERTIFIED",
                ),
                M.ReportedAttribute(
                    M.Value(publish_date),
                    conceptID="META_POSTED",
                ),
                M.ReportedAttribute(
                    M.Value(spec_modified),
                    conceptID="META_LAST_UPDATE",
                ),
                conceptID="META_UPDATE",
            ),
            M.ReportedAttribute(  #STAT_PRES
                M.Value(),
                M.ReportedAttribute(
                    M.Value(indicator_definition),
                    conceptID="DATA_DESCR",
                ),
                M.ReportedAttribute(
                    M.Value(dpsir_typology),
                    conceptID="CLASS_SYSTEM",
                ),
                M.ReportedAttribute(
                    M.Value(themes),
                    conceptID="COVERAGE_SECTOR",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="STAT_CONC_DEF",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="STAT_UNIT",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="STAT_POP",
                ),
                M.ReportedAttribute(
                    M.Value(ref_area),
                    conceptID="REF_AREA",
                ),
                M.ReportedAttribute(
                    M.Value(temporal_coverage),
                    conceptID="COVERAGE_TIME",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="BASE_PER",
                ),
                conceptID="STAT_PRES"),
            M.ReportedAttribute(
                M.Value(units),
                conceptID="UNIT_MEASURE",
            ),
            M.ReportedAttribute(
                M.Value("Not available"),
                conceptID="REF_PERIOD",
            ),
            M.ReportedAttribute(  #INST_MANDATE
                M.Value(),
                M.ReportedAttribute(
                    M.Value(
                        "Regulation (EC) No 401/2009 of the European "
                        "Parliament and of the Council of 23 April 2009 (available at "
                        "http://eur-lex.europa.eu/LexUriServ/LexUriServ.do?uri="
                        "CELEX:32009R0401:EN:NOT)"),
                    conceptID="INST_MAN_LA_OA",
                ),
                M.ReportedAttribute(
                    M.Value(
                        "Regulation (EC) No 401/2009 of the European "
                        "Parliament and of the Council of 23 April 2009 (available at "
                        "http://eur-lex.europa.eu/LexUriServ/LexUriServ.do?uri="
                        "CELEX:32009R0401:EN:NOT)"),
                    conceptID="INST_MAN_SHAR",
                ),
                conceptID="INST_MANDATE",
            ),
            M.ReportedAttribute(  #CONF
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="CONF_DATA_TR",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="CONF_POLICY",
                ),
                conceptID="CONF",
            ),
            M.ReportedAttribute(  #REL_POLICY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="REL_CAL_POLICY",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="REL_CAL_ACCESS",
                ),
                M.ReportedAttribute(
                    M.Value("All EEA indicators are public"),
                    conceptID="REL_POL_US_AC",
                ),
                conceptID="REL_POLICY",
            ),
            M.ReportedAttribute(  #DISS_FORMAT
                M.Value("Not available"),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="NEWS_REL",
                ),
                M.ReportedAttribute(
                    M.Value(latest_version.absolute_url()),
                    conceptID="PUBLICATIONS",
                ),
                M.ReportedAttribute(
                    M.Value(
                        "https://www.eea.europa.eu/data-and-maps/indicators"
                    ),
                    conceptID="ONLINE_DB",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="MICRO_DAT_ACC",
                ),
                M.ReportedAttribute(
                    M.Value(
                        "Twitter: Indicators are automatically "
                        "announced via EEA's Twitter channel (https://twitter.com/euenvironment), "
                        "which users can follow. RSS feed: Indicators are automatically "
                        "announced in a dedicated EEA indicators RSS feed "
                        "(https://www.eea.europa.eu/data-and-maps/indicators/RSS2), which users can "
                        "subscribe to. A catalogue of all indicators is available "
                        "(https://www.eea.europa.eu/data-and-maps/indicators)."
                    ),
                    conceptID="DISS_OTHER",
                ),
                conceptID="DISS_FORMAT",
            ),
            M.ReportedAttribute(  #ACCESS_DOC
                M.Value(),
                M.ReportedAttribute(
                    M.Value(methodology_reference),
                    conceptID="DOC_METHOD",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="QUALITY_DOC",
                ),
                conceptID="ACCESS_DOC",
            ),
            M.ReportedAttribute(  #QUALITY_MGMNT
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="QUALITY_ASSURE",
                ),
                M.ReportedAttribute(
                    M.Value(uncertainties),
                    conceptID="QUALITY_ASSMNT",
                ),
                conceptID="QUALITY_MGMNT",
            ),
            M.ReportedAttribute(  #RELEVANCE
                M.Value(),
                M.ReportedAttribute(
                    M.Value(user_needs),
                    conceptID="USER_NEEDS",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="USER_SAT",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COMPLETENESS",
                ),
                conceptID="RELEVANCE",
            ),
            M.ReportedAttribute(  #ACCURACY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="ACCURACY_OVERALL",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="SAMPLING_ERR",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="NONSAMPLING_ERR",
                ),
                conceptID="ACCURACY",
            ),
            M.ReportedAttribute(  #TIMELINESS_PUNCT
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="TIMELINESS",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="PUNCTUALITY",
                ),
                conceptID="TIMELINESS_PUNCT",
            ),
            M.ReportedAttribute(  #COMPARABILITY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_GEO_COVER",
                    ),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_GEO_COMMENT",
                    ),
                    conceptID="COMPAR_GEO",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_TIME_COVER",
                    ),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_TIME_COMMENT",
                    ),
                    conceptID="COMPAR_TIME",
                ),
                conceptID="COMPARABILITY",
            ),
            M.ReportedAttribute(  #COHERENCE
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="COHER_X_DOM",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COHER_INTERNAL",
                ),
                conceptID="COHERENCE",
            ),
            M.ReportedAttribute(
                M.Value("Not applicable"),
                conceptID="COST_BURDEN",
            ),
            M.ReportedAttribute(  #DATA_REV
                M.Value(),
                M.ReportedAttribute(
                    M.Value(
                        "Indicator assessments are peer reviewed and "
                        "CSIs go under extended country review process. Previous versions of "
                        "indicators are available. Data coming from EEA's data flows have their own "
                        "QA procedure. The quality of third part data is under responsibily of "
                        "respective data providers."),
                    conceptID="REV_POLICY",
                ),
                M.ReportedAttribute(
                    M.Value(
                        "Indicator assessments are peer reviewed and "
                        "CSIs go under extended country review process. Previous versions of "
                        "indicators are available. Data coming from EEA's data flows have their own "
                        "QA procedure. The quality of third part data is under responsibily of "
                        "respective data providers."),
                    conceptID="REV_PRACTICE",
                ),
                conceptID="DATA_REV",
            ),
            M.ReportedAttribute(  #STAT_PROCESS
                M.Value(),
                M.ReportedAttribute(
                    M.Value(out),
                    conceptID="SOURCE_TYPE",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="FREQ_COLL",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COLL_METHOD",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="DATA_VALIDATION",
                ),
                M.ReportedAttribute(
                    M.Value(methodology),
                    conceptID="DATA_COMP",
                ),
                M.ReportedAttribute(
                    M.Value(methodology_gapfilling),
                    conceptID="ADJUSTMENT",
                ),
                M.ReportedAttribute(
                    M.Value(frequency_of_updates),
                    conceptID="FREQ_DISS",
                ),
                conceptID="STAT_PROCESS",
            ),
            M.ReportedAttribute(
                M.Value(
                    "Please note that more metadata and additional "
                    "information about this indicator is available online at %s. For technical "
                    "issues contact EEA web team at https://www.eea.europa.eu/help/contact-info. "
                    "Metadata extracted automatically by EEA IMS at %s." %
                    (self.context.absolute_url(), now.isoformat())),
                conceptID="COMMENT_DSET",
            ),
        ),
    )
    xsi = "http://www.w3.org/2001/XMLSchema-instance"
    schemaLocation = "http://www.SDMX.org/resources/SDMXML/schemas/v2_0/genericmetadata SDMXGenericMetadata.xsd " \
        "http://www.SDMX.org/resources/SDMXML/schemas/v2_0/message SDMXMessage.xsd"
    root = lxml.etree.Element(
        nsel("GenericMetadata"),
        attrib={"{" + xsi + "}schemaLocation": schemaLocation},
        nsmap=NAMESPACES)
    root.append(header)
    root.append(metadata)
    return lxml.etree.tostring(root, pretty_print=True,
                               xml_declaration=True,
                               encoding='UTF-8', standalone="yes")
def __call__(self):
    """Render this indicator assessment as an SDMX-ML GenericMetadata
    (full ESMS report) XML document, returned as a UTF-8 string.

    The ?keepHTML=true request parameter keeps rich-text values as HTML
    wrapped in CDATA; otherwise HTML is stripped to plain text.
    """
    self.request.response.setHeader('Content-Type', 'text/xml')
    now = datetime.datetime.now()
    # the reporting period covers the current calendar year
    year_start = datetime.datetime(year=now.year, month=1, day=1)
    year_end = datetime.datetime(year=now.year, month=12, day=31)
    #maybe it should be done as timedelta of 1sec from previous year

    def getTextKeepHTML(value):
        """ Retrieve text content from html input while keeping the html """
        value = escapeSpecialChars(value)
        return _toUnicode("<![CDATA[{0}]]>".format((value)))

    def getTextStripHTML(value):
        """ Retrieve text content from html input removing the html """
        value = escapeSpecialChars(value)
        return BeautifulSoup(value, 'lxml').get_text()

    # pick the HTML strategy from the request flag (default: strip)
    getText = getTextKeepHTML if self.request.get(
        "keepHTML", "false") == "true" else getTextStripHTML

    #we extract some info here to simplify code down below
    spec = self.context.aq_parent  # parent Specification of this assessment
    effective = self.context.getEffectiveDate()
    if effective:
        publish_date = effective.asdatetime().date().strftime("%d/%m/%Y")
    else:
        publish_date = ""
    spec_modified = spec.modified().asdatetime().date().strftime("%d/%m/%Y")
    latest_version = IGetVersions(self.context).latest_version()
    ref_area = u", ".join([c.decode('utf-8')
                           for c in self.context.getLocation()])
    manager_id = spec.getManager_user_id()
    mtool = getToolByName(spec, 'portal_membership')
    # fall back to 'Missing' when the manager has no member info
    manager_name = (mtool.getMemberInfo(manager_id) or
                    {}).get('fullname', 'Missing')
    manager_name = manager_name.decode('utf-8')
    dpsir_vocab = NamedVocabulary('indicator_dpsir'
                                  ).getVocabularyDict(spec)
    typology_vocab = NamedVocabulary('indicator_typology'
                                     ).getVocabularyDict(spec)
    dpsir = dpsir_vocab.get(spec.getDpsir())
    typology = typology_vocab.get(spec.getTypology())
    dpsir_typology = "DPSIR: %s - Typology: %s" % (dpsir,
                                                   _toUnicode(typology))
    themes_vocab = dict(spec._getMergedThemes())
    themes = ", ".join([themes_vocab.get(l) for l in spec.getThemes()])
    #let's use the already well-formatted temporal coverage browser view
    temporal_coverage = getMultiAdapter(
        (self.context, self.request), name=u'formatTempCoverage')()
    units = getText(spec.getUnits()) or u'Not available'
    data_sets = [rel for rel in spec.getRelatedItems()
                 if rel.portal_type == 'Data']
    ext_data_sets = [rel for rel in spec.getRelatedItems()
                     if rel.portal_type == 'ExternalDataSpec']
    # plain-text listing of data sources: "<title> <url>" plus owner orgs
    out = ""
    for dataowner in data_sets:
        out += u" ".join((_toUnicode(dataowner.Title()),
                          dataowner.absolute_url()))
        out += u" "
        for provider_url in dataowner.getDataOwner():
            org = spec.getOrganisationName(provider_url)
            if org:
                # NOTE(review): org.Title is read as an attribute, not
                # called -- presumably already a string here; confirm
                out += u" ".join((_toUnicode(org.Title), provider_url))
                out += u" "
    for eds in ext_data_sets:
        out += u" ".join((_toUnicode(eds.Title()), eds.absolute_url()))
        out += u" "
        org = spec.getOrganisationName(eds.getProvider_url())
        if org:
            out += u" ".join((_toUnicode(org.Title),
                              eds.getProvider_url()))
            out += u" "
    mrefs = [b.getObject() for b in spec.getFolderContents(
        contentFilter={'portal_type':'MethodologyReference'})]
    methodology_reference = getText(
        "\n".join(
            [(o.Title() + "\n" + o.getDescription()) for o in mrefs]))
    uncertainties = getText('Methodology uncertainty: ' +
                            spec.getMethodology_uncertainty() +
                            '\nData uncertainty: ' +
                            spec.getData_uncertainty() +
                            '\nRationale uncertainty: ' +
                            spec.getRationale_uncertainty())
    questions = [b.getObject() for b in spec.getFolderContents(
        contentFilter={'portal_type':'PolicyQuestion'})]
    # key policy question first, then the remaining (specific) ones
    qpart = ""
    if questions:
        main_q = None
        for q in questions:
            if q.getIs_key_question():
                main_q = q
        if main_q is not None:
            qpart += "Key policy question: %s\n" % main_q.Title()
        for q in questions:
            if q == main_q:
                continue
            qpart += "Specific policy question: %s\n" % q.Title()
    user_needs = getText('Justification for indicator selection: ' +
                         spec.getRationale_justification() + "\n" + qpart)
    methodology = getText(spec.getMethodology())
    methodology_gapfilling = getText(spec.getMethodology_gapfilling())
    indicator_definition = getText(spec.Title() + ". " +
                                   spec.getDefinition())
    frequency_of_updates = getText(spec.get_frequency_of_updates())

    #The xml construction
    E = ElementMaker(nsmap=NAMESPACES)
    header = E.Header(
        E.ID("DMBB_ESMSIPEEA_A"),
        E.Prepared(now.replace(microsecond=0).isoformat()),
        E.Sender(id="4D0"),
        E.DataSetID("DMBB_ESMSIPEEA_A_1353407791410"),
        E.DataSetAction("Append"),
        E.Extracted(now.replace(microsecond=0).isoformat()),
        E.ReportingBegin(year_start.replace(microsecond=0).isoformat()),
        E.ReportingEnd(year_end.replace(microsecond=0).isoformat()),
    )
    M = ElementMaker(namespace=NAMESPACES['GenericMetadata'],
                     nsmap=NAMESPACES)
    metadata = M.MetadataSet(
        M.MetadataStructureRef('ESMSIPEEA_MSD'),
        M.MetadataStructureAgencyRef("ESTAT"),
        M.ReportRef('ESMS_REPORT_FULL'),
        M.AttributeValueSet(
            M.TargetRef("FULL_ESMS"),
            M.TargetValues(
                M.ComponentValue("2013-A0", component="TIME_PERIOD",
                                 object="TimeDimension"),
                M.ComponentValue("4D0", component="DATA_PROVIDER",
                                 object="DataProvider"),
                M.ComponentValue("DMBB_ESMSIPEEA_A", component="DATAFLOW",
                                 object="DataFlow"),
            ),
            M.ReportedAttribute(  #CONTACT
                M.Value(),
                M.ReportedAttribute(
                    M.Value('European Environment Agency (EEA)'),
                    conceptID="CONTACT_ORGANISATION",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="ORGANISATION_UNIT",
                ),
                M.ReportedAttribute(
                    M.Value(manager_name),
                    conceptID="CONTACT_NAME",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_FUNC",
                ),
                M.ReportedAttribute(
                    M.Value('Kongens Nytorv 6, 1050, '
                            'Copenhagen K, Denmark'),
                    conceptID="CONTACT_MAIL",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_EMAIL",
                ),
                M.ReportedAttribute(
                    M.Value('Not available'),
                    conceptID="CONTACT_PHONE",
                ),
                M.ReportedAttribute(
                    M.Value('Not applicable'),
                    conceptID="CONTACT_FAX",
                ),
                conceptID="CONTACT",
            ),
            M.ReportedAttribute(  #META_UPDATE
                M.Value(),
                M.ReportedAttribute(
                    M.Value(publish_date),
                    conceptID="META_CERTIFIED",
                ),
                M.ReportedAttribute(
                    M.Value(publish_date),
                    conceptID="META_POSTED",
                ),
                M.ReportedAttribute(
                    M.Value(spec_modified),
                    conceptID="META_LAST_UPDATE",
                ),
                conceptID="META_UPDATE",
            ),
            M.ReportedAttribute(  #STAT_PRES
                M.Value(),
                M.ReportedAttribute(
                    M.Value(indicator_definition),
                    conceptID="DATA_DESCR",
                ),
                M.ReportedAttribute(
                    M.Value(dpsir_typology),
                    conceptID="CLASS_SYSTEM",
                ),
                M.ReportedAttribute(
                    M.Value(themes),
                    conceptID="COVERAGE_SECTOR",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="STAT_CONC_DEF",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="STAT_UNIT",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="STAT_POP",
                ),
                M.ReportedAttribute(
                    M.Value(ref_area),
                    conceptID="REF_AREA",
                ),
                M.ReportedAttribute(
                    M.Value(temporal_coverage),
                    conceptID="COVERAGE_TIME",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="BASE_PER",
                ),
                conceptID="STAT_PRES"
            ),
            M.ReportedAttribute(
                M.Value(units),
                conceptID="UNIT_MEASURE",
            ),
            M.ReportedAttribute(
                M.Value("Not available"),
                conceptID="REF_PERIOD",
            ),
            M.ReportedAttribute(  #INST_MANDATE
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Regulation (EC) No 401/2009 of the European "
                            "Parliament and of the Council of 23 April 2009 (available at "
                            "http://eur-lex.europa.eu/LexUriServ/LexUriServ.do?uri="
                            "CELEX:32009R0401:EN:NOT)"),
                    conceptID="INST_MAN_LA_OA",
                ),
                M.ReportedAttribute(
                    M.Value("Regulation (EC) No 401/2009 of the European "
                            "Parliament and of the Council of 23 April 2009 (available at "
                            "http://eur-lex.europa.eu/LexUriServ/LexUriServ.do?uri="
                            "CELEX:32009R0401:EN:NOT)"),
                    conceptID="INST_MAN_SHAR",
                ),
                conceptID="INST_MANDATE",
            ),
            M.ReportedAttribute(  #CONF
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="CONF_DATA_TR",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="CONF_POLICY",
                ),
                conceptID="CONF",
            ),
            M.ReportedAttribute(  #REL_POLICY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="REL_CAL_POLICY",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="REL_CAL_ACCESS",
                ),
                M.ReportedAttribute(
                    M.Value("All EEA indicators are public"),
                    conceptID="REL_POL_US_AC",
                ),
                conceptID="REL_POLICY",
            ),
            M.ReportedAttribute(  #DISS_FORMAT
                M.Value("Not available"),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="NEWS_REL",
                ),
                M.ReportedAttribute(
                    M.Value(latest_version.absolute_url()),
                    conceptID="PUBLICATIONS",
                ),
                M.ReportedAttribute(
                    M.Value(
                        "https://www.eea.europa.eu/data-and-maps/indicators"),
                    conceptID="ONLINE_DB",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="MICRO_DAT_ACC",
                ),
                M.ReportedAttribute(
                    M.Value("Twitter: Indicators are automatically "
                            "announced via EEA's Twitter channel (https://twitter.com/euenvironment), "
                            "which users can follow. RSS feed: Indicators are automatically "
                            "announced in a dedicated EEA indicators RSS feed "
                            "(https://www.eea.europa.eu/data-and-maps/indicators/RSS2), which users can "
                            "subscribe to. A catalogue of all indicators is available "
                            "(https://www.eea.europa.eu/data-and-maps/indicators)."),
                    conceptID="DISS_OTHER",
                ),
                conceptID="DISS_FORMAT",
            ),
            M.ReportedAttribute(  #ACCESS_DOC
                M.Value(),
                M.ReportedAttribute(
                    M.Value(methodology_reference),
                    conceptID="DOC_METHOD",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="QUALITY_DOC",
                ),
                conceptID="ACCESS_DOC",
            ),
            M.ReportedAttribute(  #QUALITY_MGMNT
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="QUALITY_ASSURE",
                ),
                M.ReportedAttribute(
                    M.Value(uncertainties),
                    conceptID="QUALITY_ASSMNT",
                ),
                conceptID="QUALITY_MGMNT",
            ),
            M.ReportedAttribute(  #RELEVANCE
                M.Value(),
                M.ReportedAttribute(
                    M.Value(user_needs),
                    conceptID="USER_NEEDS",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="USER_SAT",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COMPLETENESS",
                ),
                conceptID="RELEVANCE",
            ),
            M.ReportedAttribute(  #ACCURACY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="ACCURACY_OVERALL",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="SAMPLING_ERR",
                ),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="NONSAMPLING_ERR",
                ),
                conceptID="ACCURACY",
            ),
            M.ReportedAttribute(  #TIMELINESS_PUNCT
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="TIMELINESS",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="PUNCTUALITY",
                ),
                conceptID="TIMELINESS_PUNCT",
            ),
            M.ReportedAttribute(  #COMPARABILITY
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_GEO_COVER",
                    ),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_GEO_COMMENT",
                    ),
                    conceptID="COMPAR_GEO",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_TIME_COVER",
                    ),
                    M.ReportedAttribute(
                        M.Value("Not available"),
                        conceptID="COMPAR_TIME_COMMENT",
                    ),
                    conceptID="COMPAR_TIME",
                ),
                conceptID="COMPARABILITY",
            ),
            M.ReportedAttribute(  #COHERENCE
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Not applicable"),
                    conceptID="COHER_X_DOM",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COHER_INTERNAL",
                ),
                conceptID="COHERENCE",
            ),
            M.ReportedAttribute(
                M.Value("Not applicable"),
                conceptID="COST_BURDEN",
            ),
            M.ReportedAttribute(  #DATA_REV
                M.Value(),
                M.ReportedAttribute(
                    M.Value("Indicator assessments are peer reviewed and "
                            "CSIs go under extended country review process. Previous versions of "
                            "indicators are available. Data coming from EEA's data flows have their own "
                            "QA procedure. The quality of third part data is under responsibily of "
                            "respective data providers."),
                    conceptID="REV_POLICY",
                ),
                M.ReportedAttribute(
                    M.Value("Indicator assessments are peer reviewed and "
                            "CSIs go under extended country review process. Previous versions of "
                            "indicators are available. Data coming from EEA's data flows have their own "
                            "QA procedure. The quality of third part data is under responsibily of "
                            "respective data providers."),
                    conceptID="REV_PRACTICE",
                ),
                conceptID="DATA_REV",
            ),
            M.ReportedAttribute(  #STAT_PROCESS
                M.Value(),
                M.ReportedAttribute(
                    M.Value(out),
                    conceptID="SOURCE_TYPE",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="FREQ_COLL",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="COLL_METHOD",
                ),
                M.ReportedAttribute(
                    M.Value("Not available"),
                    conceptID="DATA_VALIDATION",
                ),
                M.ReportedAttribute(
                    M.Value(methodology),
                    conceptID="DATA_COMP",
                ),
                M.ReportedAttribute(
                    M.Value(methodology_gapfilling),
                    conceptID="ADJUSTMENT",
                ),
                M.ReportedAttribute(
                    M.Value(frequency_of_updates),
                    conceptID="FREQ_DISS",
                ),
                conceptID="STAT_PROCESS",
            ),
            M.ReportedAttribute(
                M.Value("Please note that more metadata and additional "
                        "information about this indicator is available online at %s. For technical "
                        "issues contact EEA web team at https://www.eea.europa.eu/help/contact-info. "
                        "Metadata extracted automatically by EEA IMS at %s." %
                        (self.context.absolute_url(), now.isoformat())),
                conceptID="COMMENT_DSET",
            ),
        ),
    )
    xsi = "http://www.w3.org/2001/XMLSchema-instance"
    schemaLocation = "http://www.SDMX.org/resources/SDMXML/schemas/v2_0/genericmetadata SDMXGenericMetadata.xsd " \
        "http://www.SDMX.org/resources/SDMXML/schemas/v2_0/message SDMXMessage.xsd"
    root = lxml.etree.Element(nsel("GenericMetadata"),
                              attrib={
                                  "{" + xsi + "}schemaLocation":
                                  schemaLocation},
                              nsmap=NAMESPACES)
    root.append(header)
    root.append(metadata)
    return lxml.etree.tostring(root, pretty_print=True,
                               xml_declaration=True,
                               encoding='UTF-8', standalone="yes")
def current_version(self):
    """Return the version id of the wrapped context."""
    versions_adapter = IGetVersions(self.context)
    return versions_adapter.versionId
def create_version(original, request=None):
    """Creates a new version of an Assessment. Returns the new version object

    original -- the Assessment content object to version
    request  -- unused; kept for call-site compatibility
    """
    #ZZZ: check if the following is still applied. It is true in any case
    #we want all Assessments for all spec versions to have the
    #same version id.
    #>>>if the parent Specification has versions, then the Assessment
    #needs to be a version of those assessments
    ver = base_create_version(original, reindex=False)
    # The assessment is no longer effective
    ver.setEffectiveDate(None)
    ver.setCreationDate(DateTime())
    # Delete comment files
    file_ids = []
    for file_ob in ver.getFolderContents(
            contentFilter={'portal_type': 'File'}, full_objects=True):
        file_ids.append(file_ob.getId())
    ver.manage_delObjects(ids=file_ids)
    #ZZZ: should we reindex the objects here?
    # children of the copy are also reset to "never published / just made"
    for obj in ver.objectValues():
        obj.setEffectiveDate(None)
        obj.setCreationDate(DateTime())
    # The links to EEAFigures are updated to point to their latest version
    # Also, we need to add whatever new PolicyQuestions were added in
    # the Specification
    assessment = ver
    spec = assessment.aq_parent
    pqs = set(spec.objectIds("PolicyQuestion"))
    assigned_pqs = set()   # policy questions already linked to a part
    for ap in assessment.objectValues("AssessmentPart"):
        rels = []
        for o in ap.getRelatedItems():
            if o.meta_type == "EEAFigure":
                # refresh the figure relation to its newest version
                rels.append(IGetVersions(o).latest_version())
            elif o.meta_type == "PolicyQuestion":
                rels.append(o)
                assigned_pqs.add(o.getId())
            else:
                rels.append(o)
        ap.set_related_items(rels)
        ap.reindexObject()
    #creating missing policy questions
    new_pqs = pqs - assigned_pqs
    for oid in new_pqs:
        aid = assessment.invokeFactory(
            type_name="AssessmentPart",
            id=assessment.generateUniqueId("AssessmentPart"),
        )
        ap = assessment[aid]
        ap.set_related_items(spec[oid])
        try:
            ap.reindexObject()
        except AttributeError:
            # NOTE(review): mirrors the "happens when executed from test"
            # guard used elsewhere in this module; best-effort reindex
            continue
    # Set new state
    #IVersionControl(ver).setVersionId(version_id)
    #setting the version ID to the assessments group version id
    ver.reindexObject()
    notify(ObjectInitializedEvent(ver))
    original.reindexObject()
    # _reindex(original) #some indexed values of the context may depend on versions
    return ver