def start(self, context, request, appstruct, **kw):
    """System action: archive published events (and their pending schedules)
    whose start dates are all in the past.

    Returns an empty dict (the action produces no view data); notifies
    ActivityExecuted with every object it archived.
    """
    all_archived = []
    lac_catalog = find_catalog('lac')
    # Midnight (UTC) today: anything starting strictly before this is "past".
    start_date = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    start_date_index = lac_catalog['start_date']
    # notinrange(start_date, None): no occurrence from today onward.
    query = start_date_index.notinrange(start_date, None)
    events_toarchive = find_entities(
        interfaces=[ICulturalEvent],
        metadata_filter={'states': ['published']},
        add_query=query)
    for event in events_toarchive:
        event.state = PersistentList(['archived'])
        event.modified_at = datetime.datetime.now(tz=pytz.UTC)
        event.reindex()
        all_archived.append(event)
    dace_catalog = find_catalog('dace')
    states_index = dace_catalog['object_states']
    object_provides_index = dace_catalog['object_provides']
    # Schedules never published ('none'/'created') attached to past dates.
    query = start_date_index.notinrange(start_date, None) &\
        states_index.any(['none', 'created']) &\
        object_provides_index.any([ISchedule.__identifier__])
    schedules_toarchive = query.execute()
    for schedule in schedules_toarchive:
        schedule.state = PersistentList(['archived'])
        schedule.modified_at = datetime.datetime.now(tz=pytz.UTC)
        schedule.reindex()
        all_archived.append(schedule)
    request.registry.notify(ActivityExecuted(
        self, all_archived, get_current()))
    return {}
def _get_query(self, user):
    """Build a catalog query for contents of this context not authored by *user*."""
    container_idx = find_catalog('dace')['container_oid']
    authors_idx = find_catalog('novaideo')['object_authors']
    in_container = container_idx.eq(get_oid(self.context))
    not_by_user = authors_idx.notany([get_oid(user)])
    return in_container & not_by_user
def bind(self):
    """Compute amendment counts for the current user and store them in _bindings."""
    request_user = get_current(self.request)
    container_idx = find_catalog('dace')['container_oid']
    authors_idx = find_catalog('novaideo')['object_authors']
    in_container = container_idx.eq(get_oid(self.context))
    # All amendments visible to the user in this container.
    all_objs = find_entities(
        interfaces=[IAmendment], user=request_user, add_query=in_container)
    # The subset the user authored.
    mine_query = in_container & authors_idx.any([get_oid(request_user)])
    my_objs = find_entities(
        interfaces=[IAmendment], user=request_user, add_query=mine_query)
    total = len(all_objs)
    mine = len(my_objs)
    setattr(self, '_bindings', {
        'user': request_user,
        'len_result': total,
        'len_my': mine,
        'len_others': total - mine,
    })
def find_entities(self):
    """JSON view: search entities per the posted filter and serialize them.

    Returns a JSON Response on success, or a plain error dict when the
    posted filter cannot be parsed.
    """
    #page_limit, current_page, start, end = self._get_pagin_data()
    user = get_current()
    dace_catalog = find_catalog('dace')
    system_catalog = find_catalog('system')
    novaideo_catalog = find_catalog('novaideo')
    filter_schema = FindEntitiesJson(self.context, self.request)
    try:
        appstruct = filter_schema.calculate_posted_filter()
    except Exception as e:
        # Malformed filter: report the error instead of raising a 500.
        return {'items': [], 'total_count': 0,
                'error': True,
                'message': '{}: {}'.format(
                    e.__class__.__name__, e.args[0])}
    if appstruct is None:
        return {'items': [], 'total_count': 0,
                'error': True,
                'message': 'appstruct is None'}
    # content_types maps a type name to the fields to serialize for it.
    content_types_tree = appstruct['metadata_filter'].get(
        'content_types', {}).copy()
    content_types = list(content_types_tree.keys())
    appstruct['metadata_filter'] = appstruct.get('metadata_filter', {})
    appstruct['metadata_filter']['content_types'] = content_types
    # Pass the catalogs through so the query builders reuse them.
    appstruct['dace'] = dace_catalog
    appstruct['system'] = system_catalog
    appstruct['novaideo'] = novaideo_catalog
    entities = find_entities(
        user=user,
        sort_on='release_date',
        include_site=True,
        **appstruct)

    def dumps(obj):
        """Return values of attributes described in the colander schema
        node 'node'
        """
        registry = get_current_registry()
        content_type = registry.content.typeof(obj)
        fields = content_types_tree.get(content_type, {})
        result, to_add = get_obj_value(obj, fields)
        if result is None:
            return {}, to_add
        return result, to_add

    def merge_items(old_items, new_items):
        # Merge serialized items by '@id', combining duplicates.
        for item in new_items:
            oid = item.get('@id', None)
            if oid and oid not in old_items:
                old_items[oid] = item
            elif oid:
                old_item = old_items[oid]
                old_items[oid] = merge_dicts([item, old_item])

    items = {}
    for entity in entities:
        values, to_add = dumps(entity)
        to_add.append(values)
        merge_items(items, to_add)
    result = {'items': list(items.values()), 'total_count': len(items)}
    response = Response()
    response.content_type = "application/json"
    response.text = json.dumps(result, indent=2)
    return response
def get_content_stat(self, request):
    """Count the context user's contents by kind (idea/question/proposal/other)."""
    novaideo_index = find_catalog('novaideo')
    dace_catalog = find_catalog('dace')
    states_index = dace_catalog['object_states']
    object_provides_index = dace_catalog['object_provides']
    authors = novaideo_index['object_authors']
    authored = authors.any([self.context.__oid__])
    published = states_index.any(['published'])

    result = {}
    # Published ideas.
    result['nb_idea'] = len(
        (authored &
         object_provides_index.any((Iidea.__identifier__,)) &
         published).execute())
    # Questions only count when the site manages them.
    result['nb_question'] = 0
    if 'question' in request.content_to_manage:
        result['nb_question'] = len(
            (authored &
             object_provides_index.any((IQuestion.__identifier__,)) &
             published).execute())
    # Proposals: anything not archived and not draft.
    result['nb_proposal'] = 0
    if 'proposal' in request.content_to_manage:
        result['nb_proposal'] = len(
            (authored &
             object_provides_index.any((IProposal.__identifier__,)) &
             states_index.notany(['archived', 'draft'])).execute())
    # Everything else that is published.
    result['nb_other'] = len(
        (authored &
         object_provides_index.notany(
             (Iidea.__identifier__,
              IQuestion.__identifier__,
              IProposal.__identifier__)) &
         published).execute())
    return result
def start(self, context, request, appstruct, **kw):
    """System action: archive past published events and their unpublished
    schedules, then notify ActivityExecuted with everything archived.
    """
    all_archived = []
    lac_catalog = find_catalog('lac')
    # Today's midnight in UTC marks the "past" boundary.
    start_date = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    start_date_index = lac_catalog['start_date']
    # Events with no start date from today onward.
    query = start_date_index.notinrange(start_date, None)
    events_toarchive = find_entities(
        interfaces=[ICulturalEvent],
        metadata_filter={'states': ['published']},
        add_query=query)
    for event in events_toarchive:
        event.state = PersistentList(['archived'])
        event.modified_at = datetime.datetime.now(tz=pytz.UTC)
        event.reindex()
        all_archived.append(event)
    dace_catalog = find_catalog('dace')
    states_index = dace_catalog['object_states']
    object_provides_index = dace_catalog['object_provides']
    # Never-published schedules ('none'/'created') attached to past dates.
    query = start_date_index.notinrange(start_date, None) &\
        states_index.any(['none', 'created']) &\
        object_provides_index.any([ISchedule.__identifier__])
    schedules_toarchive = query.execute()
    for schedule in schedules_toarchive:
        schedule.state = PersistentList(['archived'])
        schedule.modified_at = datetime.datetime.now(tz=pytz.UTC)
        schedule.reindex()
        all_archived.append(schedule)
    request.registry.notify(
        ActivityExecuted(self, all_archived, get_current()))
    return {}
def find_yammer_content(interfaces):
    """Return all Yammer-connector entities implementing any of *interfaces*."""
    identifier_idx = find_catalog('novaideo')['identifier']
    provides_idx = find_catalog('dace')['object_provides']
    wanted = [iface.__identifier__ for iface in interfaces]
    query = provides_idx.any(wanted) & identifier_idx.any([YAMMER_CONNECTOR_ID])
    return query.execute().all()
def all_alerts(self):
    """Return every alert whose keys match this object's alert keys."""
    alert_keys_idx = find_catalog('lac')['alert_keys']
    provides_idx = find_catalog('dace')['object_provides']
    is_alert = provides_idx.any([IAlert.__identifier__])
    matches_keys = alert_keys_idx.any(self.get_alerts_keys())
    return (is_alert & matches_keys).execute()
def get_user_by_token(token):
    """Return the person whose api_token equals *token*, or None."""
    token_idx = find_catalog('novaideo')['api_token']
    provides_idx = find_catalog('dace')['object_provides']
    query = provides_idx.any([IPerson.__identifier__]) & token_idx.eq(token)
    matches = list(query.execute().all())
    return matches[0] if matches else None
def get_users_by_keywords(keywords):
    """Return all non-deactivated persons matching any of *keywords*.

    :param keywords: iterable of keyword strings matched against the
        'object_keywords' index.
    :returns: the catalog result set of matching persons.
    """
    novaideo_catalog = find_catalog('novaideo')
    dace_catalog = find_catalog('dace')
    keywords_index = novaideo_catalog['object_keywords']
    object_provides_index = dace_catalog['object_provides']
    states_index = dace_catalog['object_states']
    # BUGFIX: ``any`` expects a sequence of interface identifiers.  The bare
    # string previously passed here is itself a sequence — of characters —
    # so the index was queried per-character instead of for IPerson.
    query = keywords_index.any(keywords) & \
        object_provides_index.any([IPerson.__identifier__]) & \
        states_index.notany(('deactivated',))
    return query.execute().all()
def _login_validator(node, value):
    """Colander validator: fail unless a person with identifier *value* exists."""
    provides_idx = find_catalog('dace')['object_provides']
    identifier_idx = find_catalog('novaideo')['identifier']
    query = provides_idx.any([IPerson.__identifier__]) & \
        identifier_idx.any([value])
    matches = list(query.execute().all())
    found = matches[0] if matches else None
    if found is None:
        raise colander.Invalid(
            node, _('No such user ${member}', mapping={'member': value}))
def all_alerts(self):
    """Return alerts keyed to this user, excluding self/mask-excluded ones."""
    novaideo_catalog = find_catalog('novaideo')
    dace_catalog = find_catalog('dace')
    excluded_oids = [str(get_oid(self))]
    if self.mask:
        excluded_oids.append(str(get_oid(self.mask)))
    is_alert = dace_catalog['object_provides'].any([IAlert.__identifier__])
    keyed_to_me = novaideo_catalog['alert_keys'].any(self.get_alerts_keys())
    not_excluded = novaideo_catalog['alert_exclude_keys'].notany(excluded_oids)
    return (is_alert & keyed_to_me & not_excluded).execute()
def init_contents_organizations(self):
    """Re-initialize the organization of my ideas/proposals that have none.

    NOTE(review): an 'organizations' entry of 0 presumably marks contents
    without an organization — confirm against the index population code.
    """
    novaideo_catalog = find_catalog('novaideo')
    dace_catalog = find_catalog('dace')
    authored_by_me = novaideo_catalog['object_authors'].any([get_oid(self)])
    is_content = dace_catalog['object_provides'].any(
        [Iidea.__identifier__, IProposal.__identifier__])
    without_org = novaideo_catalog['organizations'].any([0])
    query = authored_by_me & is_content & without_org
    for content in query.execute().all():
        content.init_organization()
        content.reindex()
def content_types_query(node, **args):
    """Build an 'object_provides' query from the requested content types.

    Interfaces come from metadata_filter['interfaces'] plus args['interfaces'];
    when none are given, they are derived from the searchable content types.
    """
    value = None
    if 'metadata_filter' in args:
        value = args['metadata_filter']
    content_types = value.get('content_types', []) if value else []
    request = args.get('request', None)
    if not request:
        request = get_current_request()
    searchable_contents = dict(core.get_searchable_content(request))
    # No explicit types: fall back to every searchable type.
    if not content_types:
        content_types = list(searchable_contents.keys())
    interfaces = value.get('interfaces', []) if value else []
    interfaces.extend(args.get('interfaces', []))
    interfaces = list(set(interfaces))
    if interfaces:
        interfaces = [i.__identifier__ for i in interfaces]
    else:
        # Derive the primary interface of each requested content type.
        interfaces = [list(searchable_contents[i].
                           __implemented__.interfaces())[0].__identifier__
                      for i in content_types if i in searchable_contents]
    #catalog
    dace_catalog = None
    if 'dace' in args:
        dace_catalog = args['dace']
    else:
        dace_catalog = find_catalog('dace')
    #index
    object_provides_index = dace_catalog['object_provides']
    return object_provides_index.any(interfaces)
def update(self):
    """Render the batched listing of reports in self.report_state for this
    context and return the view result dict (with 'coordinates' filled in).
    """
    user = get_current()
    context_oid = get_oid(self.context)
    dace_index = find_catalog('dace')
    dace_container_oid = dace_index['container_oid']
    # Only reports contained in the current context.
    query = dace_container_oid.eq(context_oid)
    objects = find_entities(
        user=user,
        interfaces=[ISReport],
        metadata_filter={
            'states': [self.report_state]
        },
        add_query=query)
    url = self.request.resource_url(
        self.context, '',
        query={'view_report_state': self.report_state})
    batch = Batch(objects,
                  self.request,
                  url=url,
                  default_size=BATCH_DEFAULT_SIZE)
    self.title = _(self.title, mapping={'nb': batch.seqlen})
    # Anchor id of the results pane; spaces are not valid in ids.
    batch.target = "#results"+"-report-" + self.report_state.replace(' ', '')
    result_body, result = render_listing_objs(
        self.request, batch, user)
    values = {'bodies': result_body,
              'batch': batch,
              'empty_message': self.empty_message,
              'empty_icon': self.empty_icon}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def getAllWorkItems(self, node_id=None):
    """Collect this process's work items, recursing into sub-processes.

    When *node_id* is given, restrict the query to that node of the process.
    """
    dace_catalog = find_catalog('dace')
    p_uid = get_oid(self, None)
    query = dace_catalog['object_provides'].any((IWorkItem.__identifier__,)) & \
        dace_catalog['process_inst_uid'].any((int(p_uid),))
    if node_id is not None:
        query = query & dace_catalog['node_id'].eq(self.id + '.' + node_id)
    result = []
    for wi in query.execute().all():
        if wi is None:
            # Stale catalog entry: log and skip.
            log.error('getAllWorkItems: one of the wi is None for process %s',
                      p_uid)
            continue
        if isinstance(wi.node, SubProcess) and wi.node.sub_processes:
            for sub_process in wi.node.sub_processes:
                result.extend(sub_process.getAllWorkItems())
        if wi not in result:
            result.append(wi)
    return result
def occurences_start(obj, propertyname, from_=None, until=None,
                     hours=None, minutes=None):
    """Return the start-date occurrences of *obj* for *propertyname*.

    Indexed objects read occurrences back from the catalog's reverse index;
    unindexed objects expand their recurrence rule (if any) directly.
    """
    oid = get_oid(obj, None)
    if oid is not None:
        # Indexed object: read the stored occurrence integers back.
        index = find_catalog('lac')[
            dates_mapping.get(
                propertyname + '_start_date',
                propertyname + '_start_date')]
        results = index._rev_index.get(oid, ())
        results = occurences_until(until, results, True)
        results = occurences_from(from_, results, True)
        results = [int2dt(d, hours, minutes) for d in results]
    else:
        # Not yet indexed: compute occurrences from the object attributes.
        start = getattr(obj, propertyname + '_start_date', None)
        if hours is not None and minutes is not None:
            start = datetime.datetime.combine(
                start,
                datetime.time(hours, minutes, 0, tzinfo=pytz.UTC))
        recurrence = getattr(obj, propertyname + '_recurrence', '')
        if not recurrence:
            results = [start]
        else:
            results = list(recurrence_sequence_ical(start,
                                                    recrule=recurrence,
                                                    from_=from_,
                                                    until=until))
    return results
def find_directors(self):
    """Paginated director search matching the 'q' request parameter."""
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}
    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    directors_only = find_catalog('lac')['is_director'].eq(True)
    if is_all_values_key(name):
        # Wildcard: every director sheet visible to the user.
        found = find_entities(
            user=user,
            interfaces=[IArtistInformationSheet],
            add_query=directors_only)
    else:
        found = find_entities(
            user=user,
            interfaces=[IArtistInformationSheet],
            text_filter={'text_to_search': name},
            add_query=directors_only)
    total_count = len(found)
    if total_count >= start:
        window = list(found)[start:end]
    else:
        window = list(found)[:end]
    entries = [{'id': str(get_oid(entity)), 'text': entity.title}
               for entity in window]
    return {'items': entries, 'total_count': total_count}
def get_new_entities(entities):
    """Split imported *entities* into (new ones to import, existing objects).

    Entities are matched to existing objects by the 'id_source_id' key built
    from their source_data.
    """
    entities_with_ids = [
        entity for entity in entities
        if entity.get('source_data', {}).get('id', None)
    ]
    entities_without_ids = [
        entity for entity in entities
        if entity.get('source_data', {}).get('id', None) is None
    ]
    # Key = '<source id>_<source_id>' matching the 'object_id' index values.
    ids = {
        str(entity['source_data']['id'] + '_' +
            entity['source_data']['source_id']): entity
        for entity in entities_with_ids
    }
    lac_catalog = find_catalog('lac')
    object_id_index = lac_catalog['object_id']
    # TODO really needed to wake up objects?
    current_objects = list(object_id_index.any(list(ids.keys())).execute())
    current_objects_ids = [
        str(getattr(entity, 'object_id', getattr(entity, '__oid__', None)))
        for entity in current_objects
    ]
    #recuperate new entities
    entities_to_import = [
        entity for key, entity in ids.items()
        if key not in current_objects_ids
    ]
    # Entities without a source id can never match: always imported.
    entities_to_import.extend(entities_without_ids)
    return entities_to_import, current_objects
def run():
    """Poll the ZODB for new transactions and run pending system actions.

    Uses a per-machine cache of the last seen transaction id so actions are
    only re-checked when the database actually changed.
    """
    request = get_system_request()
    if request.user is None:
        # in test, db connection closed
        return
    catalog = find_catalog('dace')
    global last_transaction
    cache_key = _get_cache_key()
    last_transaction = last_transaction_by_machine.setdefault(cache_key, '')
    last_tid = catalog._p_jar.db().lastTransaction()
    if last_transaction != last_tid:
        last_transaction_by_machine[cache_key] = last_tid
        transaction.begin()
        try:
            # Only actions bound to a process, or start actions.
            system_actions = [
                a for a in getAllSystemActions()
                if getattr(a, 'process', None) or a.isstart
            ]
            log.info("new zodb transactions, actions to check: %s",
                     len(system_actions))
            for action in system_actions:
                _call_action(action)
        except Exception as e:
            # Best-effort: a failing action must not stop the poller.
            log.exception(e)
        log.info("actions to check: done")
    run_crawler()
def run():
    """Check for new ZODB transactions and execute pending system actions,
    then hand off to the crawler.
    """
    request = get_system_request()
    if request.user is None:
        # in test, db connection closed
        return
    catalog = find_catalog('dace')
    global last_transaction
    cache_key = _get_cache_key()
    last_transaction = last_transaction_by_machine.setdefault(cache_key, '')
    last_tid = catalog._p_jar.db().lastTransaction()
    # Skip the (expensive) action sweep when nothing changed in the DB.
    if last_transaction != last_tid:
        last_transaction_by_machine[cache_key] = last_tid
        transaction.begin()
        try:
            system_actions = [a for a in getAllSystemActions()
                              if getattr(a, 'process', None) or a.isstart]
            log.info("new zodb transactions, actions to check: %s",
                     len(system_actions))
            for action in system_actions:
                _call_action(action)
        except Exception as e:
            # A failing action is logged, not propagated.
            log.exception(e)
        log.info("actions to check: done")
    run_crawler()
def occurences_start(obj, propertyname, from_=None, until=None,
                     hours=None, minutes=None):
    """Compute the start occurrences of *obj*'s *propertyname* dates.

    Catalogued objects are answered from the 'lac' catalog reverse index;
    otherwise the recurrence rule on the object (if any) is expanded.
    """
    oid = get_oid(obj, None)
    if oid is not None:
        # Indexed: occurrences are stored as ints in the reverse index.
        index = find_catalog('lac')[dates_mapping.get(
            propertyname + '_start_date',
            propertyname + '_start_date')]
        results = index._rev_index.get(oid, ())
        results = occurences_until(until, results, True)
        results = occurences_from(from_, results, True)
        results = [int2dt(d, hours, minutes) for d in results]
    else:
        start = getattr(obj, propertyname + '_start_date', None)
        if hours is not None and minutes is not None:
            start = datetime.datetime.combine(
                start,
                datetime.time(hours, minutes, 0, tzinfo=pytz.UTC))
        recurrence = getattr(obj, propertyname + '_recurrence', '')
        if not recurrence:
            results = [start]
        else:
            results = list(
                recurrence_sequence_ical(start,
                                         recrule=recurrence,
                                         from_=from_,
                                         until=until))
    return results
def user_invariant(self, appstruct):
    """Schema invariant: reject users duplicating an existing identity key."""
    context = self.bindings['context']
    first_name = appstruct.get('first_name', None)
    last_name = appstruct.get('last_name', None)
    birth_date = appstruct.get('birth_date', None)
    birthplace = appstruct.get('birthplace', None)
    if not (first_name and last_name and birth_date and birthplace):
        return
    try:
        birth_date = colander.iso8601.parse_date(birth_date).date()
    except colander.iso8601.ParseError:
        # An unparsable date is validated elsewhere; nothing to check here.
        return
    # Identity key: normalized name + birthplace + birth date, no spaces.
    raw_key = first_name + last_name + birthplace + birth_date.strftime(
        "%d/%m/%Y")
    identity_key = normalize_title(raw_key).replace(' ', '')
    identifier_index = find_catalog('novaideo')['identifier']
    matches = list(identifier_index.any([identity_key]).execute().all())
    if context in matches:
        matches.remove(context)
    if matches:
        raise colander.Invalid(self, _('User already exists'))
def authors_analyzer(node, source, validated, validated_value):
    """Return for example dict([('7422658066368290778', 1))])
    7422658066368290778 is the oid of the Person object
    """
    validated_value_ = []
    if 'contribution_filter' in validated:
        # Pop the authors so `source` doesn't also filter on them.
        validated_value_ = validated['contribution_filter'].pop(
            'authors', [])
    objects = source(**validated)
    index = find_catalog('novaideo')['object_authors']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        # Normalize to a BTrees set so `intersection` can be used.
        object_ids = index.family.IF.Set(object_ids)
    result = {}
    for author in validated_value_:
        author_oid = get_oid(author)
        # Count the matched objects this author has authored.
        oids = index._fwd_index.get(author_oid)
        if oids:
            count = len(intersection(oids, object_ids))
        else:
            count = 0
        result[str(author_oid)] = count
    return {'authors': result}
def clean_reviews(root, registry):
    """Evolve step: open non-wapi reviews to both Sortir Wapi and Lille sites."""
    import json
    from substanced.util import get_oid
    from dace.util import find_catalog
    wapi_site = [s for s in root.site_folders if s.title == 'Sortir Wapi'][0]
    lille_site = [s for s in root.site_folders if s.title == 'Sortir Lille'][0]
    wapi_site_oid = get_oid(wapi_site)
    lille_site_oid = get_oid(lille_site)
    with open('critiques.json') as data_file:
        entities = json.load(data_file)
    with_source_ids = [e for e in entities
                       if e.get('source_data', {}).get('id', None)]
    # Key matching the 'object_id' index: '<id>_<source_id>'.
    ids = {str(e['source_data']['id'] + '_' + e['source_data']['source_id']): e
           for e in with_source_ids}
    object_id_index = find_catalog('lac')['object_id']
    reviews = object_id_index.any(list(ids.keys())).execute()
    len_entities = str(len(reviews))
    for position, review in enumerate(reviews):
        review_site = getattr(review, 'source_data', {}).get('site', None)
        if review_site != 'wapi':
            review.access_control = PersistentList(
                [wapi_site_oid, lille_site_oid])
            review.reindex()
            log.info("Wapi-Lille: "+str(position) + "/" + len_entities)
        else:
            log.info("Wapi: "+str(position) + "/" + len_entities)
    log.info('Clean review evolved.')
def get_related_contents(self, user, interface):
    """Render a batched listing of published contents sharing this venue."""
    venue_index = find_catalog('lac')['object_venue']
    same_venue = venue_index.any([self.context.get_id()])
    objects = find_entities(
        user=user,
        interfaces=[interface],
        metadata_filter={'states': ['published']},
        add_query=same_venue,
        include_site=True)
    batch = Batch(objects, self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"+str(interface.__name__)
    len_result = batch.seqlen
    result_body = []
    for obj in batch:
        state_label = get_states_mapping(
            user, obj, getattr(obj, 'state_or_none', [None])[0])
        rendered = self.content(
            args={'object': obj,
                  'current_user': user,
                  'state': state_label},
            template=obj.templates['default'])['body']
        result_body.append(rendered)
    contents_body = self.content(
        args={'bodies': result_body, 'batch': batch},
        template=self.related_events_template)['body']
    # None when there is nothing to show at all.
    return ((result_body and contents_body) or None), len_result
def update(self):
    """Render the batched report listing for self.report_state and return
    the view result with its 'coordinates' entry.
    """
    user = get_current()
    context_oid = get_oid(self.context)
    dace_index = find_catalog('dace')
    dace_container_oid = dace_index['container_oid']
    # Restrict to reports contained in this context.
    query = dace_container_oid.eq(context_oid)
    objects = find_entities(
        user=user,
        interfaces=[ISReport],
        metadata_filter={'states': [self.report_state]},
        add_query=query)
    url = self.request.resource_url(
        self.context, '',
        query={'view_report_state': self.report_state})
    batch = Batch(objects,
                  self.request,
                  url=url,
                  default_size=BATCH_DEFAULT_SIZE)
    self.title = _(self.title, mapping={'nb': batch.seqlen})
    # Spaces are stripped: the value is used as an HTML anchor id.
    batch.target = "#results" + "-report-" + self.report_state.replace(
        ' ', '')
    result_body, result = render_listing_objs(self.request, batch, user)
    values = {
        'bodies': result_body,
        'batch': batch,
        'empty_message': self.empty_message,
        'empty_icon': self.empty_icon
    }
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def find_directors(self):
    """Search director sheets matching 'q' and return a paginated item list."""
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}
    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    directors_query = find_catalog('lac')['is_director'].eq(True)
    common_kwargs = {
        'user': user,
        'interfaces': [IArtistInformationSheet],
        'add_query': directors_query,
    }
    if not is_all_values_key(name):
        common_kwargs['text_filter'] = {'text_to_search': name}
    found = find_entities(**common_kwargs)
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(sheet)), 'text': sheet.title}
               for sheet in page]
    return {'items': entries, 'total_count': total_count}
def start(self, context, request, appstruct, **kw):
    """Censor *context*: record the reason, close pending reports, censor."""
    user = get_current()
    reason = appstruct['_object_data']
    context.addtoproperty('censoring_reason', reason)
    grant_roles(user=user, roles=(('Owner', reason), ))
    reason.setproperty('author', user)
    reason.reindex()
    # Mark every pending report on this context as processed.
    in_context = find_catalog('dace')['container_oid'].eq(get_oid(context))
    pending_reports = find_entities(
        interfaces=[ISReport],
        metadata_filter={'states': ['pending']},
        add_query=in_context)
    for pending in pending_reports:
        pending.state = PersistentList(['processed'])
        pending.reindex()
    context.init_len_current_reports()
    adapter = get_current_registry().queryAdapter(context, ISignalableObject)
    if adapter is not None:
        context.state.remove('reported')
        adapter.censor(request)
    return {}
def get_adapted_content(
        email, request, interfaces=None,
        content_types=['idea'], last_sending_date=None):
    """Render the newsletter body of up to five published contents for *email*.

    :param email: identifier of the member whose keywords personalize the search.
    :param request: current request, used for template rendering.
    :param interfaces: content interfaces to search (default: [Iidea]).
    :param content_types: content type names to search.
    :param last_sending_date: when given, only contents published after it.
    :returns: rendered HTML body, or '' when nothing matches.
    """
    # Avoid the mutable-default-argument pitfall: resolve per call.
    if interfaces is None:
        interfaces = [Iidea]
    body = ''
    novaideo_catalog = find_catalog('novaideo')
    identifier_index = novaideo_catalog['identifier']
    query = identifier_index.any([email])
    users = list(query.execute().all())
    member = users[0] if users else None
    query = None
    if last_sending_date:
        published_at_index = novaideo_catalog['published_at']
        query = published_at_index.gt(last_sending_date)
    entities = find_entities(
        interfaces=interfaces,
        metadata_filter={
            'content_types': content_types,
            'states': ['published'],
            'keywords': getattr(member, 'keywords', [])},
        sort_on='release_date',
        add_query=query)
    # Keep only the first five (most recent by release_date).
    result = []
    for obj in entities:
        result.append(obj)
        if len(result) == 5:
            break
    if result:
        body = renderers.render(
            CONTENT_TEMPLATE, {'entities': result}, request)
    return body
def start(self, context, request, appstruct, **kw):
    """Record a censoring reason, process pending reports, then censor."""
    current_user = get_current()
    censoring_reason = appstruct['_object_data']
    context.addtoproperty('censoring_reason', censoring_reason)
    grant_roles(user=current_user, roles=(('Owner', censoring_reason), ))
    censoring_reason.setproperty('author', current_user)
    censoring_reason.reindex()
    container_query = find_catalog('dace')['container_oid'].eq(
        get_oid(context))
    # Every report still pending on this context is now processed.
    for pending_report in find_entities(
            interfaces=[ISReport],
            metadata_filter={'states': ['pending']},
            add_query=container_query):
        pending_report.state = PersistentList(['processed'])
        pending_report.reindex()
    context.init_len_current_reports()
    adapter = get_current_registry().queryAdapter(
        context, ISignalableObject)
    if adapter is not None:
        context.state.remove('reported')
        adapter.censor(request)
    return {}
def getAllWorkItems(self, node_id=None):
    """Return all work items of this process, including those of its
    sub-processes; optionally restricted to the node *node_id*.
    """
    dace_catalog = find_catalog('dace')
    process_inst_uid_index = dace_catalog['process_inst_uid']
    object_provides_index = dace_catalog['object_provides']
    p_uid = get_oid(self, None)
    query = object_provides_index.any((IWorkItem.__identifier__,)) & \
        process_inst_uid_index.any((int(p_uid),))
    if node_id is not None:
        node_id_index = dace_catalog['node_id']
        query = query & node_id_index.eq(self.id + '.' + node_id)
    workitems = query.execute().all()
    result = []
    for wi in workitems:
        if wi is None:
            # Stale catalog entry: report it but keep collecting.
            log.error(
                'getAllWorkItems: one of the wi is None for process %s',
                p_uid)
            continue
        if isinstance(wi.node, SubProcess) and wi.node.sub_processes:
            # Recurse into the running sub-processes.
            for sub_process in wi.node.sub_processes:
                result.extend(sub_process.getAllWorkItems())
        if not (wi in result):
            result.append(wi)
    return result
def get_related_contents(self, user, interface):
    """Render the batched listing of published contents of this venue.

    Returns (rendered body or None when empty, number of results).
    """
    lac_catalog = find_catalog('lac')
    venue_index = lac_catalog['object_venue']
    # Contents registered at the same venue as this context.
    query = venue_index.any([self.context.get_id()])
    objects = find_entities(user=user,
                            interfaces=[interface],
                            metadata_filter={'states': ['published']},
                            add_query=query,
                            include_site=True)
    batch = Batch(objects,
                  self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents" + str(interface.__name__)
    len_result = batch.seqlen
    result_body = []
    for obj in batch:
        render_dict = {
            'object': obj,
            'current_user': user,
            'state': get_states_mapping(user, obj,
                                        getattr(obj, 'state_or_none',
                                                [None])[0])
        }
        body = self.content(args=render_dict,
                            template=obj.templates['default'])['body']
        result_body.append(body)
    values = {'bodies': result_body, 'batch': batch}
    contents_body = self.content(
        args=values, template=self.related_events_template)['body']
    # None when there were no results at all.
    return ((result_body and contents_body) or None), len_result
def started_processes(self):
    """Return every started process instance matching this definition's id."""
    dace_catalog = find_catalog('dace')
    is_process = dace_catalog['object_provides'].any(
        (IProcess.__identifier__,))
    same_id = dace_catalog['process_id'].eq(self.id)
    return list((is_process & same_id).execute().all())
def update(self):
    """Render the filtered, sorted and batched member listing, including
    the inactive-users counter shown to portal managers.
    """
    self.execute(None)
    user = get_current()
    is_manager = has_role(user=user, role=('PortalManager', ))
    filters = [
        {'metadata_filter': {
            'content_types': ['person']
        }}
    ]
    filter_form, filter_data = self._add_filter(user, is_manager)
    args = merge_with_filter_view(self, {})
    args['request'] = self.request
    objects = find_entities(
        user=user,
        filters=filters,
        **args)
    objects, sort_body = sort_view_objects(
        self, objects, ['person'], user)
    url = self.request.resource_url(self.context, self.name)
    batch = Batch(objects,
                  self.request,
                  url=url,
                  default_size=BATCH_DEFAULT_SIZE)
    batch.target = "#results_users"
    len_result = batch.seqlen
    # '*' in the title when more than one member matched.
    index = str(len_result)
    if len_result > 1:
        index = '*'
    self.title = self._get_title(
        index=index, len_result=len_result, user=user)
    filter_data['filter_message'] = self.title
    filter_body = self.filter_instance.get_body(filter_data)
    result_body, result = render_listing_objs(
        self.request, batch, user)
    # Users whose last connection predates the inactivity window.
    novaideo_catalog = find_catalog('novaideo')
    last_connection_index = novaideo_catalog['last_connection']
    current_date = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    inactive_users = find_users(
        last_connection_index,
        current_date,
        (INACTIVITY_DURATION, None))
    if filter_form:
        # Merge the filter form's assets into the view result.
        result = merge_dicts(
            {'css_links': filter_form['css_links'],
             'js_links': filter_form['js_links']
             }, result)
    values = {'bodies': result_body,
              'batch': batch,
              'is_manager': is_manager,
              'inactivity_duration': INACTIVITY_DURATION,
              'inactive_users': inactive_users.__len__(),
              'filter_body': filter_body,
              'sort_body': sort_body}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def change_index_for_object_zipcode_txt(root, registry):
    """Evolve step: switch the 'object_zipcode_txt' index to
    TextWithoutScoreIndex, re-registering it so the change persists.
    """
    from substanced.util import find_catalog
    catalog = find_catalog(root, 'lac')
    from lac.catalog import TextWithoutScoreIndex
    idx = catalog['object_zipcode_txt']
    # Swap the class in place, then delete/re-add so the catalog
    # persists the updated index object.
    idx.__class__ = TextWithoutScoreIndex
    del catalog['object_zipcode_txt']
    catalog['object_zipcode_txt'] = idx
    log.info('Changed index for object_zipcode_txt')
def get_organizations_by_evaluations(
        filter_, user, root, date_from, date_to):
    """Aggregate support/opposition totals per organization over the contents
    matching *filter_* and the optional publication date window.

    Returns ({organization_oid: {'support': n, 'opposition': n}}, nb_objects).
    """
    novaideo_catalog = find_catalog('novaideo')
    date_index = novaideo_catalog['published_at']
    query = None
    if date_from:
        # Normalize the boundary to midnight UTC.
        date_from = datetime.datetime.combine(
            date_from, datetime.datetime.min.time())
        date_from = date_from.replace(tzinfo=pytz.UTC)
        query = date_index.gt(date_from)
    if date_to:
        date_to = datetime.datetime.combine(
            date_to, datetime.datetime.min.time())
        date_to = date_to.replace(tzinfo=pytz.UTC)
        if query is None:
            query = date_index.lt(date_to)
        else:
            query = query & date_index.lt(date_to)
    objects = find_entities(
        user=user,
        add_query=query,
        **filter_)
    index = novaideo_catalog['organizations']
    support = novaideo_catalog['support']
    oppose = novaideo_catalog['oppose']
    intersection = index.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        # Normalize to a BTrees set for fast intersections.
        object_ids = index.family.IF.Set(object_ids)
    # calculate sum of support / sum of opposition
    result = {}
    for struct_id, oids in index._fwd_index.items():
        struct = get_obj(struct_id)
        if struct:
            # Contents of this organization among the matched objects.
            structoids = intersection(oids, object_ids)
            support_nb = 0
            for nb, supportoids in support._fwd_index.items():
                if nb > 0:
                    support_nb += nb * len(intersection(supportoids,
                                                        structoids))
            oppose_nb = 0
            for nb, opposeoids in oppose._fwd_index.items():
                if nb > 0:
                    oppose_nb += nb * len(intersection(opposeoids,
                                                       structoids))
            result[struct_id] = {
                'support': support_nb,
                'opposition': oppose_nb
            }
    return result, object_ids.__len__()
def get_all_user_contributions(user, interfaces=None):
    """Return all published contents authored by *user*.

    :param user: the author whose contents are searched.
    :param interfaces: content interfaces to search (default: [IEntity]).
    :returns: the entities found by ``find_entities``.
    """
    # Avoid the mutable-default-argument pitfall: resolve per call.
    if interfaces is None:
        interfaces = [IEntity]
    novaideo_index = find_catalog('novaideo')
    object_authors_index = novaideo_index['object_authors']
    query = object_authors_index.any([get_oid(user)])
    return find_entities(
        interfaces=interfaces,
        metadata_filter={
            'states': ['published']},
        add_query=query)
def persons_contacted(self):
    """Return all contacted persons.

    Registered users are returned as Person objects; unknown addresses fall
    back to their email local part (without any '+tag').
    """
    identifier_idx = find_catalog('novaideo')['identifier']
    provides_idx = find_catalog('dace')['object_provides']
    is_person = provides_idx.any([IPerson.__identifier__])
    contacted = []
    for email in self._email_persons_contacted:
        matches = list(
            (is_person & identifier_idx.any([email])).execute().all())
        found = matches[0] if matches else None
        if found is not None:
            contacted.append(found)
        else:
            contacted.append(email.split('@')[0].split('+')[0])
    return set(contacted)
def login(context, request):
    """API login endpoint: authenticate by token or by login/password.

    Reads a JSON body with optional ``token`` or ``login``/``password``
    keys. On success, remembers the principal, notifies ``LoggedIn``,
    updates ``last_connection`` and returns the user's API token;
    otherwise returns ``{'status': False, 'token': None}``.
    """
    login_data = json.loads(request.body.decode())
    login = login_data.get('login', None)
    password = login_data.get('password', None)
    token = login_data.get('token', None)
    logged_user = None
    if token:
        logged_user = auth_user(token, request)

    if login and password:
        novaideo_catalog = find_catalog('novaideo')
        dace_catalog = find_catalog('dace')
        identifier_index = novaideo_catalog['identifier']
        object_provides_index = dace_catalog['object_provides']
        query = object_provides_index.any([IPerson.__identifier__]) &\
            identifier_index.any([login])
        users = list(query.execute().all())
        user = users[0] if users else None
        valid_check = user and user.check_password(password)
        # Only site admins or 'active' users may log in.
        if valid_check and \
           (has_role(user=user, role=('SiteAdmin', )) or
                'active' in getattr(user, 'state', [])):
            logged_user = user

        # Guard: when authentication failed, logged_user may still be
        # None; the unguarded attribute write would raise AttributeError.
        if logged_user is not None and \
           getattr(logged_user, 'api_token', None) is None:
            logged_user.api_token = uuid.uuid4().hex

    if logged_user:
        headers = remember(request, get_oid(logged_user))
        request.registry.notify(LoggedIn(
            login, logged_user, context, request))
        logged_user.last_connection = datetime.datetime.now(tz=pytz.UTC)
        request.response.headerlist.extend(headers)
        if hasattr(logged_user, 'reindex'):
            logged_user.reindex()

        return {
            'status': True,
            'token': logged_user.api_token
        }

    return {
        'status': False,
        'token': None
    }
def get_evaluation_stat(self, request):
    """Return support/opposition totals for the objects authored by
    ``self.context``, shaped for the analytics chart.

    Result: {'support': {...}, 'opposition': {...}} with value, color
    and localized label for each series.
    """
    # NOTE: the unused dace catalog/state/provides indexes were removed.
    novaideo_index = find_catalog('novaideo')
    authors = novaideo_index['object_authors']
    query = authors.any([self.context.__oid__])
    objects = query.execute()
    support = novaideo_index['support']
    oppose = novaideo_index['oppose']
    intersection = authors.family.IF.intersection
    object_ids = getattr(objects, 'ids', objects)
    if isinstance(object_ids, (list, types.GeneratorType)):
        object_ids = authors.family.IF.Set(object_ids)

    # calculate sum of support / sum of opposition
    support_nb = 0
    for nb, supportoids in support._fwd_index.items():
        if nb > 0:
            support_nb += nb * len(intersection(supportoids, object_ids))

    oppose_nb = 0
    for nb, opposeoids in oppose._fwd_index.items():
        if nb > 0:
            oppose_nb += nb * len(intersection(opposeoids, object_ids))

    localizer = request.localizer
    items = {
        'support': {
            'value': support_nb,
            'color': SUPPORT_COLOR,
            'translation': localizer.translate(_('Support', context='analytics'))
        },
        'opposition': {
            'value': oppose_nb,
            'color': OPPOSE_COLOR,
            'translation': localizer.translate(_('Opposition'))
        }
    }
    return items
def get_examination_stat(self, request):
    """Return, per examination state, the number of objects authored by
    ``self.context``, with the chart color and localized label from
    ``EXAMINATION_VALUES``.
    """
    # NOTE: the unused object_provides/object_keywords indexes were removed.
    novaideo_index = find_catalog('novaideo')
    dace_catalog = find_catalog('dace')
    states_index = dace_catalog['object_states']
    authors = novaideo_index['object_authors']
    # Loop-invariant part of the query, hoisted out of the loop.
    authored = authors.any([self.context.__oid__])
    items = {}
    localizer = request.localizer
    for examination, data in EXAMINATION_VALUES.items():
        query = authored & states_index.any([examination])
        items[examination] = {
            'value': len(query.execute()),
            'color': data['color'],
            'translation': localizer.translate(data['title'])
        }

    return items
def search(text, content_types, user):
    """Search entities of the given content types matching free-text terms.

    ``text`` is a comma-separated keyword string (lower-cased before
    matching); when empty, only the content-type filter applies.
    Archived objects are excluded; results are filtered by
    ``can_access`` for ``user`` and sorted by ``modified_at``,
    most recent first.
    """
    interfaces = [SEARCHABLE_CONTENT[i].__identifier__
                  for i in content_types]
    # catalogs
    dace_catalog = find_catalog('dace')
    novaideo_catalog = find_catalog('novaideo')
    system_catalog = find_catalog('system')
    # indexes
    title_index = dace_catalog['object_title']
    description_index = dace_catalog['object_description']
    states_index = dace_catalog['object_states']
    object_provides_index = dace_catalog['object_provides']
    keywords_index = novaideo_catalog['object_keywords']
    text_index = system_catalog['text']
    name_index = system_catalog['name']
    # free-text part of the query, when terms were given
    query = None
    if text:
        terms = [part.lower() for part in re.split(', *', text)]
        query = (keywords_index.any(terms)
                 | name_index.any(terms)
                 | states_index.any(terms))
        for term in terms:
            query = (query
                     | title_index.contains(term)
                     | description_index.contains(term)
                     | text_index.contains(term))

    # restrict to the requested interfaces and drop archived content
    type_query = object_provides_index.any(interfaces)
    query = type_query if query is None else query & type_query
    query = query & states_index.notany(('archived',))
    accessible = [obj for obj in query.execute().all()
                  if can_access(user, obj)]
    accessible.sort(
        key=lambda e: getattr(e, 'modified_at', datetime.datetime.today()),
        reverse=True)
    return accessible
def started_processes(self):
    """Return every indexed process whose ``process_id`` equals this
    definition's ``id``.
    """
    dace_catalog = find_catalog('dace')
    object_provides_index = dace_catalog['object_provides']
    processid_index = dace_catalog['process_id']
    query = object_provides_index.any((IProcess.__identifier__,)) & \
        processid_index.eq(self.id)
    # list(...) instead of a pass-through comprehension over the result set
    return list(query.execute().all())
def update(self):
    """Render the organization's content listing for this widget.

    When the widget is active (or loaded on demand) it finds the
    active/published entities of ``self.content_type`` linked to this
    organization, batches and renders them; otherwise it emits an empty
    body. Returns the coordinates/item dict expected by the view layer.
    """
    body = ''
    result = {}
    # Render only when visible, or when explicitly requested via on_demand.
    if self.isactive or self.params('on_demand') == 'load':
        current_user = get_current()
        validated = {
            'metadata_filter': {
                'content_types': [self.content_type],
                'states': ['active', 'published']
            }
        }
        novaideo_catalog = find_catalog('novaideo')
        organizations_index = novaideo_catalog['organizations']
        # Restrict to entities attached to this organization.
        query = organizations_index.any([self.context.__oid__])
        objects = find_entities(user=current_user,
                                filters=[validated],
                                add_query=query)
        sort_url = self.request.resource_url(
            self.context, '@@index',
            query={'view_content_attr': self.content_id})
        objects, sort_body = sort_view_objects(
            self, objects, [self.content_type],
            current_user, sort_url=sort_url)
        url = self.request.resource_url(
            self.context, '@@index',
            query={'view_content_attr': self.content_id})
        batch = Batch(objects, self.request,
                      url=url, default_size=BATCH_DEFAULT_SIZE)
        batch.target = "#results-" + self.content_type
        # Interpolate the result count into the widget title.
        self.title = _(self.title, mapping={'nb': batch.seqlen})
        # render_listing_objs replaces `result` with css/js resources.
        result_body, result = render_listing_objs(
            self.request, batch, current_user,
            display_state=getattr(self, 'display_state', True))
        values = {
            'bodies': result_body,
            'batch': batch,
            'empty_message': self.empty_message,
            'empty_icon': self.empty_icon,
            'sort_body': sort_body
        }
        body = self.content(args=values, template=self.template)['body']

    # NOTE(review): the item is adapted even when inactive (empty body),
    # matching the on-demand loading pattern — confirm against the
    # original indentation.
    item = self.adapt_item(body, self.viewid)
    item['isactive'] = getattr(self, 'isactive', False)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def start_intermediate_events_callback():
    """Re-arm prepared intermediate catch events.

    Scans every indexed work item; for each one whose node is an
    ``IntermediateCatchEvent`` already marked ``execution_prepared``,
    re-runs the event kind's preparation, then commits.
    """
    dace_catalog = find_catalog('dace')
    provides_query = dace_catalog['object_provides'].any(
        (IWorkItem.__identifier__, ))
    for work_item in provides_query.execute().all():
        event_node = getattr(work_item, 'node', None)
        if isinstance(event_node, IntermediateCatchEvent) and \
           event_node.execution_prepared:
            log.info("Calling %s.eventKind.prepare_for_execution()",
                     event_node)
            event_node.eventKind.prepare_for_execution(True)

    # commit to execute after commit hooks
    transaction.commit()
def get_or_create_user(request, appstruct, set_source_data=True):
    """Look up a user by identifier, creating one if none exists.

    The identifier is the e-mail from ``user_data`` when present,
    otherwise ``<app_name>_<id>`` built from ``source_data``. When an
    existing user is found and ``set_source_data`` is true, its source
    data is refreshed from ``appstruct``.
    """
    user_id = appstruct.get('user_data', {}).get('email', None)
    if not user_id:
        source_data = appstruct.get('source_data', {})
        user_id = source_data.get('app_name', '') + '_' +\
            source_data.get('id', '')

    identifier_index = find_catalog('novaideo')['identifier']
    object_provides_index = find_catalog('dace')['object_provides']
    query = object_provides_index.any([IPerson.__identifier__]) &\
        identifier_index.any([user_id])
    matches = list(query.execute().all())
    if not matches:
        return create_user(request, appstruct)

    user = matches[0]
    if set_source_data:
        user.set_source_data(appstruct.get('source_data', {}))

    return user
def update(self):
    """Render the users listing view with filtering, sorting and batching.

    Finds all 'person' entities visible to the current user, renders
    them as a batch, builds the filter form, computes the count of
    inactive users (no connection within ``INACTIVITY_DURATION``), and
    returns the coordinates/item dict expected by the view layer.
    """
    self.execute(None)
    user = get_current()
    is_manager = has_role(user=user, role=('PortalManager', ))
    filters = [{'metadata_filter': {'content_types': ['person']}}]
    filter_form, filter_data = self._add_filter(user, is_manager)
    # Merge any filter-view parameters into the query arguments.
    args = merge_with_filter_view(self, {})
    args['request'] = self.request
    objects = find_entities(user=user,
                            filters=filters,
                            **args)
    objects, sort_body = sort_view_objects(
        self, objects, ['person'], user)
    url = self.request.resource_url(self.context, self.name)
    batch = Batch(objects, self.request,
                  url=url,
                  default_size=BATCH_DEFAULT_SIZE)
    batch.target = "#results_users"
    len_result = batch.seqlen
    # '*' in the title when there is more than one result.
    index = str(len_result)
    if len_result > 1:
        index = '*'

    self.title = self._get_title(
        index=index, len_result=len_result, user=user)
    filter_data['filter_message'] = self.title
    filter_body = self.filter_instance.get_body(filter_data)
    # render_listing_objs replaces `result` with css/js resources.
    result_body, result = render_listing_objs(
        self.request, batch, user)
    novaideo_catalog = find_catalog('novaideo')
    last_connection_index = novaideo_catalog['last_connection']
    # Today at midnight UTC, used as the upper bound for inactivity.
    current_date = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    inactive_users = find_users(
        last_connection_index, current_date,
        (INACTIVITY_DURATION, None))
    if filter_form:
        result = merge_dicts(
            {
                'css_links': filter_form['css_links'],
                'js_links': filter_form['js_links']
            }, result)

    values = {
        'bodies': result_body,
        'batch': batch,
        'is_manager': is_manager,
        'inactivity_duration': INACTIVITY_DURATION,
        'inactive_users': inactive_users.__len__(),
        'filter_body': filter_body,
        'sort_body': sort_body
    }
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def email_validator(node, kw):
    """Colander validator: reject an e-mail already used by another user.

    The bound ``context`` (the user being edited, if any) is allowed to
    keep its own address; any other match raises ``colander.Invalid``.
    """
    context = node.bindings['context']
    identifier_index = find_catalog('novaideo')['identifier']
    matching = list(identifier_index.any([kw]).execute().all())
    if context in matching:
        matching.remove(context)

    if matching:
        raise colander.Invalid(
            node,
            _('${email} email address already in use',
              mapping={'email': kw}))
def source(**args):
    """Fetch the active/published objects of this view's content type
    that belong to the current challenge.

    Closure over ``self`` and ``user`` from the enclosing scope; extra
    keyword arguments are forwarded to ``find_entities``.
    """
    challenges_index = find_catalog('novaideo')['challenges']
    in_challenge = challenges_index.any([self.context.__oid__])
    metadata = {
        'metadata_filter': {
            'content_types': [self.content_type],
            'states': ['active', 'published']}
    }
    return find_entities(
        user=user,
        filters=[metadata],
        add_query=in_challenge,
        **args)