def find_cinema_venue(self):
    """Autocomplete lookup of cinema venues matching the 'q' parameter.

    Returns a select2-style payload: {'items': [...], 'total_count': N}.
    """
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(name):
        result = find_entities(user=user, interfaces=[IVenue],
                               keywords=['cinema'])
    else:
        result = find_entities(user=user, interfaces=[IVenue],
                               text_filter={'text_to_search': name},
                               keywords=['cinema'])
    result = list(result)
    # BUG FIX: total_count must reflect the full match count; it was
    # previously computed after slicing, so it reported the page size.
    total_count = len(result)
    if total_count >= start:
        result = result[start:end]
    else:
        result = result[:end]
    entries = [{'id': str(get_oid(e)),
                'text': e.title,
                'description': e.description}
               for e in result]
    return {'items': entries, 'total_count': total_count}
def find_directors(self):
    """Paginated search of artist sheets flagged as directors."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    director_query = find_catalog('lac')['is_director'].eq(True)
    if is_all_values_key(query_text):
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              add_query=director_query)
    else:
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              text_filter={'text_to_search': query_text},
                              add_query=director_query)
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(director)), 'text': director.title}
               for director in page]
    return {'items': entries, 'total_count': total_count}
def find_user(self):
    """Autocomplete lookup of active persons matching the 'q' parameter.

    Returns a select2-style payload: {'items': [...], 'total_count': N}.
    """
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(name):
        result = find_entities(interfaces=[IPerson],
                               metadata_filter={'states': ['active']})
    else:
        result = find_entities(interfaces=[IPerson],
                               text_filter={'text_to_search': name},
                               metadata_filter={'states': ['active']})
    result = list(result)
    # BUG FIX: report the full match count, not the size of the
    # sliced page (it was previously computed after slicing).
    total_count = len(result)
    if total_count >= start:
        result = result[start:end]
    else:
        result = result[:end]
    entries = [{'id': str(get_oid(e)), 'text': e.title} for e in result]
    return {'items': entries, 'total_count': total_count}
def find_labels(self):
    """Paginated label search returning entries with their picture URL."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(query_text):
        found = find_entities(user=current_user, interfaces=[ILabel])
    else:
        found = find_entities(user=current_user, interfaces=[ILabel],
                              text_filter={'text_to_search': query_text})
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(label)),
                'text': label.title,
                'img': label.picture.url}
               for label in page]
    return {'items': entries, 'total_count': total_count}
def find_cinema_venue(self):
    """Autocomplete lookup of cinema venues matching the 'q' parameter.

    Returns a select2-style payload: {'items': [...], 'total_count': N}.
    """
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(name):
        result = find_entities(user=user, interfaces=[IVenue],
                               keywords=['cinema'])
    else:
        result = find_entities(user=user, interfaces=[IVenue],
                               text_filter={'text_to_search': name},
                               keywords=['cinema'])
    result = list(result)
    # BUG FIX: total_count must reflect the full match count; it was
    # previously computed after slicing, so it reported the page size.
    total_count = len(result)
    if total_count >= start:
        result = result[start:end]
    else:
        result = result[:end]
    entries = [{'id': str(get_oid(e)),
                'text': e.title,
                'description': e.description}
               for e in result]
    return {'items': entries, 'total_count': total_count}
def find_entity(self, content_types=None):
    """Paginated search over published/active contents of given types.

    content_types: optional list of content-type names restricting the
    search. BUG FIX: defaults to None instead of a mutable default
    argument ([]); an empty list is substituted at call time, which is
    backward-compatible with existing callers.
    """
    if content_types is None:
        content_types = []
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    metadata_filter = {'content_types': content_types,
                       'states': ['published', 'active']}
    if is_all_values_key(name):
        result = find_entities(metadata_filter=metadata_filter, user=user)
    else:
        result = find_entities(metadata_filter=metadata_filter, user=user,
                               text_filter={'text_to_search': name})
    total_count = len(result)
    if total_count >= start:
        result = list(result)[start:end]
    else:
        result = list(result)[:end]
    entries = [{'id': str(get_oid(e)),
                'text': e.title,
                'icon': getattr(e, 'icon',
                                'glyphicon glyphicon-question-sign')}
               for e in result]
    return {'items': entries, 'total_count': total_count}
def find_artists(self):
    """Paginated artist-sheet search with a short presentation text."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(query_text):
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet])
    else:
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              text_filter={'text_to_search': query_text})
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(artist)),
                'text': artist.title,
                'description': artist.presentation_text(200)}
               for artist in page]
    return {'items': entries, 'total_count': total_count}
def find_directors(self):
    """Paginated search of artist sheets flagged as directors."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    director_query = find_catalog('lac')['is_director'].eq(True)
    if is_all_values_key(query_text):
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              add_query=director_query)
    else:
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              text_filter={'text_to_search': query_text},
                              add_query=director_query)
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(director)), 'text': director.title}
               for director in page]
    return {'items': entries, 'total_count': total_count}
def update_zipcodes(root, registry):
    """Evolve step: collapse set/list zipcodes to a single value.

    Venues (live and archived) are fixed by _update_zipcode_venue;
    structures are normalized in place below.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue, IStructureBase
    _update_zipcode_venue(find_entities(interfaces=[IVenue]))
    _update_zipcode_venue(find_entities(
        interfaces=[IVenue],
        metadata_filter={'states': ['archived']}))
    structures = find_entities(interfaces=[IStructureBase])
    total = str(len(structures))
    for index, structure in enumerate(structures):
        normalized = []
        for address in getattr(structure, 'address', []):
            zipcodes = address.get('zipcode', [])
            if zipcodes is not None and isinstance(zipcodes, (set, list)):
                zipcodes = list(zipcodes)
                # keep only the first zipcode, if any
                address['zipcode'] = zipcodes[0] if zipcodes else None
            normalized.append(address)
        structure.address = PersistentList(normalized)
        structure.reindex()
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Addresses evolved.')
def update_zipcodes(root, registry):
    """Evolve step: collapse set/list zipcodes to a single value.

    Venues (live and archived) are fixed by _update_zipcode_venue;
    structures are normalized in place below.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue, IStructureBase
    _update_zipcode_venue(find_entities(interfaces=[IVenue]))
    _update_zipcode_venue(find_entities(
        interfaces=[IVenue],
        metadata_filter={'states': ['archived']}))
    structures = find_entities(interfaces=[IStructureBase])
    total = str(len(structures))
    for index, structure in enumerate(structures):
        normalized = []
        for address in getattr(structure, 'address', []):
            zipcodes = address.get('zipcode', [])
            if zipcodes is not None and isinstance(zipcodes, (set, list)):
                zipcodes = list(zipcodes)
                # keep only the first zipcode, if any
                address['zipcode'] = zipcodes[0] if zipcodes else None
            normalized.append(address)
        structure.address = PersistentList(normalized)
        structure.reindex()
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Addresses evolved.')
def find_labels(self):
    """Paginated label search returning entries with their picture URL."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(query_text):
        found = find_entities(user=current_user, interfaces=[ILabel])
    else:
        found = find_entities(user=current_user, interfaces=[ILabel],
                              text_filter={'text_to_search': query_text})
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(label)),
                'text': label.title,
                'img': label.picture.url}
               for label in page]
    return {'items': entries, 'total_count': total_count}
def find_artists(self):
    """Paginated artist-sheet search with a short presentation text."""
    query_text = self.params('q')
    if not query_text:
        return {'items': [], 'total_count': 0}

    current_user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    if is_all_values_key(query_text):
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet])
    else:
        found = find_entities(user=current_user,
                              interfaces=[IArtistInformationSheet],
                              text_filter={'text_to_search': query_text})
    total_count = len(found)
    if total_count >= start:
        page = list(found)[start:end]
    else:
        page = list(found)[:end]
    entries = [{'id': str(get_oid(artist)),
                'text': artist.title,
                'description': artist.presentation_text(200)}
               for artist in page]
    return {'items': entries, 'total_count': total_count}
def find_venues(self):
    """Paginated venue search, optionally limited to the user's history.

    When the 'venue_history' request parameter is 'true', the search is
    intersected with venues the user already contributed (directly or
    through cultural events).
    """
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    use_history = bool(self.params('venue_history') == 'true')
    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    venues = None
    if use_history:
        # TODO optimization
        collected = []
        for contribution in getattr(user, 'all_contributions', []):
            if isinstance(contribution, Venue) and \
               'archived' not in contribution.state:
                collected.append(contribution)
            if isinstance(contribution, CulturalEvent):
                collected.extend(
                    v for v in contribution.venues
                    if 'archived' not in v.state)
        venues = [get_oid(v) for v in set(collected)]
    if is_all_values_key(name):
        found = find_entities(user=user, interfaces=[IVenue],
                              intersect=venues)
    else:
        found = find_entities(user=user, interfaces=[IVenue],
                              text_filter={'text_to_search': name},
                              intersect=venues)
    found = list(found)
    total_count = len(found)
    if total_count >= start:
        page = found[start:end]
    else:
        page = found[:end]
    entries = [{'id': venue.get_id(),
                'text': venue.title,
                'city': venue.city,
                'description': venue.presentation_text(200)}
               for venue in page]
    return {'items': entries, 'total_count': total_count}
def find_venues(self):
    """Paginated venue search, optionally limited to the user's history.

    When the 'venue_history' request parameter is 'true', the search is
    intersected with venues the user already contributed (directly or
    through cultural events).
    """
    name = self.params('q')
    if not name:
        return {'items': [], 'total_count': 0}

    use_history = bool(self.params('venue_history') == 'true')
    user = get_current()
    page_limit, current_page, start, end = self._get_pagin_data()
    venues = None
    if use_history:
        # TODO optimization
        collected = []
        for contribution in getattr(user, 'all_contributions', []):
            if isinstance(contribution, Venue) and \
               'archived' not in contribution.state:
                collected.append(contribution)
            if isinstance(contribution, CulturalEvent):
                collected.extend(
                    v for v in contribution.venues
                    if 'archived' not in v.state)
        venues = [get_oid(v) for v in set(collected)]
    if is_all_values_key(name):
        found = find_entities(user=user, interfaces=[IVenue],
                              intersect=venues)
    else:
        found = find_entities(user=user, interfaces=[IVenue],
                              text_filter={'text_to_search': name},
                              intersect=venues)
    found = list(found)
    total_count = len(found)
    if total_count >= start:
        page = found[start:end]
    else:
        page = found[:end]
    entries = [{'id': venue.get_id(),
                'text': venue.title,
                'city': venue.city,
                'description': venue.presentation_text(200)}
               for venue in page]
    return {'items': entries, 'total_count': total_count}
def start(self, context, request, appstruct, **kw):
    """Merge the `targets` keywords into `source` across all keyword trees.

    Applies the edit to the site root tree, smart-folder filters,
    searchable contents carrying one of the target keywords, and
    site-folder trees/filters.
    """
    source = appstruct['source']
    targets = appstruct['targets']
    root = getSite()
    new_tree = edit_keywords(targets, source, root.tree)
    if new_tree:
        root.tree = new_tree
    # Smart folders: rewrite the tree stored in each metadata filter.
    for folder in find_entities(interfaces=[ISmartFolder]):
        folder_filters = getattr(folder, 'filters', [])
        for filter_ in folder_filters:
            tree = filter_.get('metadata_filter', {}).get('tree', None)
            if tree:
                new_tree = edit_keywords(targets, source, tree)
                if new_tree:
                    filter_['metadata_filter']['tree'] = new_tree
        folder.filters = PersistentList(folder_filters)
        folder.reindex()
    # Searchable contents indexed under one of the target keywords.
    lowered = [word.lower() for word in targets]
    for obj in find_entities(interfaces=[ISearchableEntity],
                             keywords=lowered):
        new_tree = edit_keywords(targets, source, obj.tree)
        if new_tree:
            obj.tree = new_tree
            obj.reindex()
    # Site folders: both their own tree and their filters.
    for folder in find_entities(interfaces=[ISiteFolder]):
        new_tree = edit_keywords(targets, source, folder.tree)
        if new_tree:
            folder.tree = new_tree
        folder_filters = getattr(folder, 'filters', [])
        for filter_ in folder_filters:
            tree = filter_.get('metadata_filter', {}).get('tree', None)
            if tree:
                new_tree = edit_keywords(targets, source, tree)
                if new_tree:
                    filter_['metadata_filter']['tree'] = new_tree
        folder.filters = PersistentList(folder_filters)
        folder.reindex()
    return {}
def delegate_widget(node, kw):
    """Select2 widget listing organizations visible to the current user."""
    current_user = get_current()
    organizations = find_entities(user=current_user,
                                  interfaces=[IOrganization])
    choices = [(org, org.title) for org in organizations]
    return Select2Widget(values=choices)
def perimeter_widget(node, kw):
    """Select2 widget listing searchable entities, labeled 'title(type)'."""
    current_user = get_current()
    entities = find_entities(user=current_user,
                             interfaces=[ISearchableEntity])
    choices = [(entity, entity.title + '(' + entity.type_title + ')')
               for entity in entities]
    return Select2Widget(values=choices)
def get_geo_cultural_event(request, filters=None, user=None):
    """Build map marker data for cultural events matching `filters`.

    Returns {schedule_object_id: data} where data holds coordinates,
    marker icon, title, url, rendered popup content and categories.

    BUG FIXES:
    - `filters` defaulted to a mutable {}; it now defaults to None and
      an empty dict is substituted at call time (backward-compatible).
    - `obj.sections` was mutated in place (appending 'all' once per
      schedule, persisting duplicates on the object); the list is now
      copied before appending. (If `sections` was already a fresh copy,
      this is a no-op — behavior is safe either way.)
    """
    if filters is None:
        filters = {}
    if user is None:
        user = get_current()
    objects = find_entities(
        user=user,
        # ignore_end_date=True,
        include_site=True,
        **filters)
    locations = {}
    for obj in objects:
        for schedule in obj.substitutions:
            coordinates = schedule.venue.addresses[0].get(
                'coordinates', None) if schedule.venue else None
            if not coordinates:
                continue
            # copy so the persisted object's sections are untouched
            sections = list(obj.sections)
            coordinates = coordinates.split(',')
            data = {}
            data['latitude'] = coordinates[0]
            data['longitude'] = coordinates[1]
            data['coordinates'] = coordinates
            data['icon'] = 'lacstatic/images/map/marker_blue.png'
            data['title'] = obj.title
            data['url'] = request.resource_url(obj, '@@index')
            data['content'] = renderers.render(
                obj.templates.get('map'),
                {'object': obj, 'schedule': schedule},
                request)
            data['show'] = False
            sections.append('all')
            data['categories'] = sections
            locations[str(schedule.object_id)] = data
    return locations
def start(self, context, request, appstruct, **kw):
    """Archive published events and pending schedules whose start date passed."""
    archived = []
    lac_catalog = find_catalog('lac')
    today = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    start_date_index = lac_catalog['start_date']
    past_query = start_date_index.notinrange(today, None)
    events = find_entities(
        interfaces=[ICulturalEvent],
        metadata_filter={'states': ['published']},
        add_query=past_query)
    for event in events:
        event.state = PersistentList(['archived'])
        event.modified_at = datetime.datetime.now(tz=pytz.UTC)
        event.reindex()
        archived.append(event)
    # Schedules still in 'none'/'created' whose start date has passed.
    dace_catalog = find_catalog('dace')
    schedule_query = (
        start_date_index.notinrange(today, None) &
        dace_catalog['object_states'].any(['none', 'created']) &
        dace_catalog['object_provides'].any([ISchedule.__identifier__]))
    for schedule in schedule_query.execute():
        schedule.state = PersistentList(['archived'])
        schedule.modified_at = datetime.datetime.now(tz=pytz.UTC)
        schedule.reindex()
        archived.append(schedule)
    request.registry.notify(
        ActivityExecuted(self, archived, get_current()))
    return {}
def get_related_contents(self, user, interface):
    """Render published contents of `interface` attached to this venue.

    Returns (rendered-html-or-None, result-count).
    """
    venue_query = find_catalog('lac')['object_venue'].any(
        [self.context.get_id()])
    found = find_entities(user=user,
                          interfaces=[interface],
                          metadata_filter={'states': ['published']},
                          add_query=venue_query,
                          include_site=True)
    batch = Batch(found, self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents" + str(interface.__name__)
    len_result = batch.seqlen
    bodies = []
    for content in batch:
        state = get_states_mapping(
            user, content,
            getattr(content, 'state_or_none', [None])[0])
        rendered = self.content(
            args={'object': content,
                  'current_user': user,
                  'state': state},
            template=content.templates['default'])['body']
        bodies.append(rendered)
    contents_body = self.content(
        args={'bodies': bodies, 'batch': batch},
        template=self.related_events_template)['body']
    return ((bodies and contents_body) or None), len_result
def start(self, context, request, appstruct, **kw):
    """Extract filtered contents into an ODT via a temporary smart folder."""
    user = get_current()
    appstruct.pop('_csrf_token_')
    classification_ids = appstruct.get('classifications', [])
    source_class = None
    if classification_ids:
        appstruct.pop('classifications')
        # chain classifications from last to first
        for fid in reversed(classification_ids):
            source_class = CLASSIFICATIONS[fid](source_class)
    objects = find_entities(user=user,
                            include_site=True,
                            filters=appstruct['filters'])
    fallback = datetime.datetime.now(tz=pytz.UTC)
    objects = sorted(objects,
                     key=lambda e: getattr(e, 'modified_at', fallback),
                     reverse=True)
    from lac.content.smart_folder import generate_search_smart_folder
    folder = generate_search_smart_folder('Extraction folder')
    folder.classifications = source_class
    odtfile = folder.classifications.extract(
        objects, request, folder,
        template_type="extraction",
        filters=appstruct['filters'])
    return {'file': odtfile,
            'filters': appstruct['filters'],
            'user': user}
def getattributes(self, values, **args):
    """Collect classification attributes for `values`, including sub-levels.

    Builds, per top-level published smart folder, the set of ids it
    contains among `values`, then maps each value to its attributes.
    """
    inverse_substitutions = args.get('inverse_substitutions', {})
    user = get_current()
    top_folders = [
        folder for folder in find_entities(
            interfaces=[ISmartFolder],
            metadata_filter={'states': ['published']},
            force_local_control=True)
        if not folder.parents]
    values_oids = [get_oid(inverse_substitutions.get(value, value))
                   for value in values]
    folders_results = {
        folder: get_folder_content(folder, user, sort_on=None,
                                   intersect=values_oids, **args).ids
        for folder in top_folders}
    # flatten the per-value attribute lists
    attributes = [
        attribute
        for value in values
        for attribute in self._getattribute(value, folders_results, **args)]
    result = {self: attributes}
    if self.subclassification:
        result.update(
            self.subclassification.getattributes(values, **args))
    return result
def get_related_contents(self, user):
    """Render published contents (reviews, events, synopses) for this artist.

    Returns (rendered-html-or-None, result-count).
    """
    interfaces = get_subinterfaces(IBaseReview)
    interfaces.extend([ICulturalEvent, IFilmSynopses])
    found = find_entities(
        user=user,
        interfaces=interfaces,
        metadata_filter={'states': ['published']},
        contribution_filter={'artists_ids': [self.context]},
        include_site=True,
        sort_on='release_date',
        reverse=True)
    batch = Batch(list(found), self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"
    len_result = batch.seqlen
    bodies = []
    for content in batch:
        state = get_states_mapping(
            user, content,
            getattr(content, 'state_or_none', [None])[0])
        rendered = self.content(
            args={'object': content,
                  'current_user': user,
                  'state': state},
            template=content.templates['default'])['body']
        bodies.append(rendered)
    contents_body = self.content(
        args={'bodies': bodies, 'batch': batch},
        template=self.related_contents_template)['body']
    return ((bodies and contents_body) or None), len_result
def update_coordinates(root, registry):
    """Evolve step: rewrite venue coordinates from 'lat-lng' to 'lat,lng'."""
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue
    venues = find_entities(interfaces=[IVenue])
    total = str(len(venues))
    for index, venue in enumerate(venues):
        normalized = []
        for address in getattr(venue, 'addresses', []):
            coordinates = address.get('coordinates', None)
            if coordinates:
                parts = coordinates.split('-')
                if len(parts) == 2:
                    address['coordinates'] = ','.join(parts)
            normalized.append(address)
        venue.addresses = PersistentList(normalized)
        venue.reindex()
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Addresses evolved.')
def fix_contributors(root, registry):
    """Evolve step: ensure authors (and original authors) appear as contributors."""
    from lac.views.filter import find_entities
    from lac.content.interface import ISearchableEntity
    entities = find_entities(interfaces=[ISearchableEntity])
    total = str(len(entities))
    for index, content in enumerate(entities):
        if hasattr(content, 'contributors'):
            original = getattr(content, 'original', None)
            contributors = content.contributors
            if content.author and content.author not in contributors:
                content.addtoproperty('contributors', content.author)
                # re-read after mutation
                contributors = content.contributors
            if original and original.author and \
               original.author not in contributors:
                content.addtoproperty('contributors', original.author)
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Contributors evolved.')
def clean_artists_duplicates(root, registry):
    """Evolve step: merge duplicate artist sheets into one kept sheet.

    Prefers a published duplicate as the survivor; others are replaced
    and removed from the root 'artists' property.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IArtistInformationSheet
    from lac.utilities.duplicates_utility import find_duplicates_artist
    artists = find_entities(interfaces=[IArtistInformationSheet])
    total = str(len(artists))
    for index, artist in enumerate(artists):
        if artist and artist.__parent__ and not artist.author:
            duplicates = find_duplicates_artist(artist)
            if duplicates:
                duplicates.append(artist)
                publisheds = [a for a in duplicates
                              if 'published' in a.state]
                kept = publisheds[0] if publisheds else artist
                duplicates.remove(kept)
                for duplicate in duplicates:
                    if duplicate.replace_by(kept):
                        root.delfromproperty('artists', duplicate)
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Clean artists evolved.')
def clean_venues_duplicates(root, registry):
    """Evolve step: merge duplicate venues into one kept venue.

    Prefers a published duplicate as the survivor; others are replaced
    and removed from the root 'venues' property.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue
    from lac.utilities.duplicates_utility import find_duplicates_venue
    venues = find_entities(interfaces=[IVenue])
    total = str(len(venues))
    for index, venue in enumerate(venues):
        if venue and venue.__parent__ and not venue.author:
            duplicates = find_duplicates_venue(venue)
            if duplicates:
                duplicates.append(venue)
                publisheds = [v for v in duplicates
                              if 'published' in v.state]
                kept = publisheds[0] if publisheds else venue
                duplicates.remove(kept)
                for duplicate in duplicates:
                    if duplicate.replace_by(kept):
                        root.delfromproperty('venues', duplicate)
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Clean venues evolved.')
def reviwes_access_control_evolve(root, registry):
    """Evolve step: set source_site/access_control on reviews per origin site.

    NOTE(review): the function name keeps its historical typo ('reviwes')
    because callers reference it by that name.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IBaseReview
    from substanced.util import get_oid
    reviews = find_entities(interfaces=[IBaseReview])
    len_entities = str(len(reviews))
    wapi_site = [s for s in root.site_folders
                 if s.title == 'Sortir Wapi'][0]
    lille_site = [s for s in root.site_folders
                  if s.title == 'Sortir Lille'][0]
    wapi_site_oid = get_oid(wapi_site)
    lille_site_oid = get_oid(lille_site)
    for index, review in enumerate(reviews):
        source_data = getattr(review, 'source_data', {})
        origin_site = source_data.get('site', None)
        origin_id = source_data.get('source_id', None)
        if origin_site == 'wapi':
            review.source_site = wapi_site_oid
            review.access_control = PersistentList([wapi_site_oid])
            log.info("Wapi: " + str(index) + "/" + len_entities)
        elif origin_id == 'sortir':
            review.source_site = lille_site_oid
            review.access_control = PersistentList([lille_site_oid])
            # log.info("Lille: "+str(index) + "/" + len_entities)
        else:
            source_site = review.source_site if review.source_site else 'all'
            review.access_control = PersistentList([source_site])
            # log.info("CrerationCulturelle: "+str(index)+"/"+len_entities)
        review.reindex()
    log.info('Review access_control evolved.')
def start(self, context, request, appstruct, **kw):
    """Archive published events and pending schedules whose start date passed."""
    archived = []
    lac_catalog = find_catalog('lac')
    today = datetime.datetime.combine(
        datetime.datetime.now(),
        datetime.time(0, 0, 0, tzinfo=pytz.UTC))
    start_date_index = lac_catalog['start_date']
    past_query = start_date_index.notinrange(today, None)
    events = find_entities(
        interfaces=[ICulturalEvent],
        metadata_filter={'states': ['published']},
        add_query=past_query)
    for event in events:
        event.state = PersistentList(['archived'])
        event.modified_at = datetime.datetime.now(tz=pytz.UTC)
        event.reindex()
        archived.append(event)
    # Schedules still in 'none'/'created' whose start date has passed.
    dace_catalog = find_catalog('dace')
    schedule_query = (
        start_date_index.notinrange(today, None) &
        dace_catalog['object_states'].any(['none', 'created']) &
        dace_catalog['object_provides'].any([ISchedule.__identifier__]))
    for schedule in schedule_query.execute():
        schedule.state = PersistentList(['archived'])
        schedule.modified_at = datetime.datetime.now(tz=pytz.UTC)
        schedule.reindex()
        archived.append(schedule)
    request.registry.notify(
        ActivityExecuted(self, archived, get_current()))
    return {}
def review_imgs_site_evolve(root, registry):
    """Evolve step: rewrite article image URLs for reviews from known sites."""
    from lac.utilities.data_manager import evolve_article_images
    from lac.views.filter import find_entities
    from lac.content.interface import IBaseReview
    reviews = find_entities(interfaces=[IBaseReview])
    request = get_current_request()
    for review in reviews:
        article = review.article
        origin = getattr(review, 'source_data', {}).get('site', None)
        if not article or origin not in SOURCE_SITES:
            continue
        try:
            root_url = request.resource_url(review)
            resolved, newarticle = evolve_article_images(
                review, article, SOURCE_SITES.get(origin), root_url)
        except Exception:
            # best-effort: keep going on resolution failures
            log.warning(review.title + " images not resolved")
            continue
        if resolved:
            review.article = newarticle
            review.reindex()
            log.info(review.title)
    log.info('Review imgs evolved.')
def normalize_names_evolve(root, registry):
    """Evolve step: rename archived entities to their normalized __name__."""
    from lac.views.filter import find_entities
    from dace.i18n.normalizer.interfaces import INormalizer
    from dace.util import name_normalizer
    normalizer = registry.getUtility(INormalizer, 'default_normalizer')
    if normalizer:
        def valid_normalizer(word):
            return normalizer.normalize(word).decode()
    else:
        def valid_normalizer(word):
            return name_normalizer(word)
    entities = find_entities(metadata_filter={'states': ['archived']})
    len_entities = str(len(entities))
    for index, entity in enumerate(entities):
        old_name = getattr(entity, '__name__', None)
        new_name = valid_normalizer(old_name)
        if new_name and old_name and new_name != old_name:
            parent = getattr(entity, '__parent__', None)
            if parent is not None:
                parent.rename(old_name, new_name)
                log.info(old_name + " -> " + new_name)
        log.info(str(index) + "/" + len_entities)
    log.info('End name normalizer evolve')
def start(self, context, request, appstruct, **kw):
    """Extract filtered contents into an ODT via a temporary smart folder."""
    user = get_current()
    appstruct.pop('_csrf_token_')
    classification_ids = appstruct.get('classifications', [])
    source_class = None
    if classification_ids:
        appstruct.pop('classifications')
        # chain classifications from last to first
        for fid in reversed(classification_ids):
            source_class = CLASSIFICATIONS[fid](source_class)
    objects = find_entities(user=user,
                            include_site=True,
                            filters=appstruct['filters'])
    fallback = datetime.datetime.now(tz=pytz.UTC)
    objects = sorted(objects,
                     key=lambda e: getattr(e, 'modified_at', fallback),
                     reverse=True)
    from lac.content.smart_folder import generate_search_smart_folder
    folder = generate_search_smart_folder('Extraction folder')
    folder.classifications = source_class
    odtfile = folder.classifications.extract(
        objects, request, folder,
        template_type="extraction",
        filters=appstruct['filters'])
    return {'file': odtfile,
            'filters': appstruct['filters'],
            'user': user}
def find_advertistings(self):
    """Return published web advertisings for this slot (self.name).

    Results are cached on the request; outside the root context, ads
    attached to the root keyword tree sort first.
    """
    # TODO frequence
    root = getSite()
    context, is_root = self.get_context(root)
    if hasattr(context, 'get_all_keywords'):
        keywords = list(context.get_all_keywords())
    else:
        keywords = list(getattr(context, 'keywords', []))
    if not keywords:
        keywords = [ROOT_TREE]
    advertisings = getattr(self.request, 'cache_advertisings', None)
    if advertisings is None:
        site = str(get_oid(self.request.get_site_folder))
        advertisings = find_entities(
            interfaces=[IWebAdvertising],
            keywords=keywords,
            metadata_filter={'states': ['published']},
            other_filter={'sources': [site]},
            force_publication_date=True)
        self.request.cache_advertisings = advertisings
    advertisings = [ad for ad in advertisings
                    if self.name in getattr(ad, 'positions', [])]
    if not is_root:
        # ads carrying the root tree keyword sort first
        advertisings = sorted(
            advertisings,
            key=lambda ad: 1 if getattr(ad, 'tree', {}).get(ROOT_TREE, {})
            else 2)
    return advertisings
def normalize_names_evolve(root, registry):
    """Evolve step: rename archived entities to their normalized __name__."""
    from lac.views.filter import find_entities
    from dace.i18n.normalizer.interfaces import INormalizer
    from dace.util import name_normalizer
    normalizer = registry.getUtility(INormalizer, 'default_normalizer')
    if normalizer:
        def valid_normalizer(word):
            return normalizer.normalize(word).decode()
    else:
        def valid_normalizer(word):
            return name_normalizer(word)
    entities = find_entities(metadata_filter={'states': ['archived']})
    len_entities = str(len(entities))
    for index, entity in enumerate(entities):
        old_name = getattr(entity, '__name__', None)
        new_name = valid_normalizer(old_name)
        if new_name and old_name and new_name != old_name:
            parent = getattr(entity, '__parent__', None)
            if parent is not None:
                parent.rename(old_name, new_name)
                log.info(old_name + " -> " + new_name)
        log.info(str(index) + "/" + len_entities)
    log.info('End name normalizer evolve')
def perimeter_m_widget(node, kw):
    """Select2 widget listing site folders visible to the current user."""
    current_user = get_current()
    site_folders = find_entities(user=current_user,
                                 interfaces=[ISiteFolder])
    choices = [(folder, folder.title) for folder in site_folders]
    return Select2Widget(values=choices)
def members_choice(node, kw):
    """Select2 multi-select of active persons, sorted by title."""
    persons = find_entities(interfaces=[IPerson],
                            metadata_filter={'states': ['active']})
    choices = sorted(((person, person.title) for person in persons),
                     key=lambda pair: pair[1])
    return Select2Widget(values=choices, multiple=True)
def clean_venues_duplicates(root, registry):
    """Evolve step: merge duplicate venues into one kept venue.

    Prefers a published duplicate as the survivor; others are replaced
    and removed from the root 'venues' property.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue
    from lac.utilities.duplicates_utility import find_duplicates_venue
    venues = find_entities(interfaces=[IVenue])
    total = str(len(venues))
    for index, venue in enumerate(venues):
        if venue and venue.__parent__ and not venue.author:
            duplicates = find_duplicates_venue(venue)
            if duplicates:
                duplicates.append(venue)
                publisheds = [v for v in duplicates
                              if 'published' in v.state]
                kept = publisheds[0] if publisheds else venue
                duplicates.remove(kept)
                for duplicate in duplicates:
                    if duplicate.replace_by(kept):
                        root.delfromproperty('venues', duplicate)
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
        log.info(str(index) + "/" + total)
    log.info('Clean venues evolved.')
def update(self):
    """Render the organizations listing as a batched coordinates payload."""
    self.execute(None)
    organizations = find_entities(user=get_current(),
                                  interfaces=[IOrganization],
                                  sort_on='modified_at',
                                  reverse=True)
    batch = Batch(organizations, self.request,
                  default_size=BATCH_DEFAULT_SIZE)
    batch.target = "#results_organizations"
    len_result = batch.seqlen
    index = '*' if len_result > 1 else str(len_result)
    self.title = _(CONTENTS_MESSAGES[index],
                   mapping={'nember': len_result})
    bodies = [
        self.content(args={'object': org},
                     template=org.templates['default'])['body']
        for org in batch]
    values = {
        'bodies': bodies,
        'length': len_result,
        'batch': batch,
    }
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    return {'coordinates': {self.coordinates: [item]}}
def review_imgs_site_evolve(root, registry):
    """Evolve step: rewrite article image URLs for reviews from known sites."""
    from lac.utilities.data_manager import evolve_article_images
    from lac.views.filter import find_entities
    from lac.content.interface import IBaseReview
    reviews = find_entities(interfaces=[IBaseReview])
    request = get_current_request()
    for review in reviews:
        article = review.article
        origin = getattr(review, 'source_data', {}).get('site', None)
        if not article or origin not in SOURCE_SITES:
            continue
        try:
            root_url = request.resource_url(review)
            resolved, newarticle = evolve_article_images(
                review, article, SOURCE_SITES.get(origin), root_url)
        except Exception:
            # best-effort: keep going on resolution failures
            log.warning(review.title + " images not resolved")
            continue
        if resolved:
            review.article = newarticle
            review.reindex()
            log.info(review.title)
    log.info('Review imgs evolved.')
def update(self):
    """Render the batched list of published games sourced by this site."""
    self.execute(None)
    user = get_current()
    site_oid = str(get_oid(get_site_folder(True)))
    games = find_entities(interfaces=[IGame],
                          metadata_filter={'states': ['published']},
                          other_filter={'sources': [site_oid]},
                          force_publication_date=True)
    batch = Batch(games, self.request, default_size=BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"
    len_result = batch.seqlen
    # Pick the singular/plural title variant.
    index = '*' if len_result > 1 else str(len_result)
    self.title = _(CONTENTS_MESSAGES[index],
                   mapping={'nember': len_result})
    bodies = []
    for game in batch:
        rendered = self.content(
            args={'object': game, 'current_user': user, 'state': None},
            template=game.templates['default'])['body']
        bodies.append(rendered)
    values = {'bodies': bodies, 'batch': batch}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    return {'coordinates': {self.coordinates: [item]}}
def get_folder_content(folder, user, add_query=None,
                       sort_on='release_date', reverse=True, **args):
    """Return the entities of *folder* visible to *user*.

    Combines the folder's classification queries with *add_query*, then
    delegates to ``find_entities`` using the folder's own filters
    (OR-combined) and any extra search arguments in *args*.
    """
    folder_filters = deepcopy(getattr(folder, 'filters', []))
    # AND together the queries of every classification of the folder.
    query = None
    for fid in getattr(folder, 'classifications', []):
        classification_query = CLASSIFICATIONS[fid]().get_query(**args)
        query = and_op(query, classification_query)
    query = and_op(query, add_query)
    return find_entities(user=user,
                         add_query=query,
                         sort_on=sort_on,
                         reverse=reverse,
                         filters=folder_filters,
                         include_site=True,
                         filter_op='or',
                         **args)
def get_related_contents(self, user, interface):
    """Render the published contents of *interface* linked to this venue.

    Returns ``(rendered_body_or_None, total_count)``.
    """
    venue_index = find_catalog('lac')['object_venue']
    query = venue_index.any([self.context.get_id()])
    contents = find_entities(
        user=user,
        interfaces=[interface],
        metadata_filter={'states': ['published']},
        add_query=query,
        include_site=True)
    batch = Batch(contents, self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"+str(interface.__name__)
    len_result = batch.seqlen
    bodies = []
    for content in batch:
        state = get_states_mapping(
            user, content,
            getattr(content, 'state_or_none', [None])[0])
        rendered = self.content(
            args={'object': content,
                  'current_user': user,
                  'state': state},
            template=content.templates['default'])['body']
        bodies.append(rendered)
    contents_body = self.content(
        args={'bodies': bodies, 'batch': batch},
        template=self.related_events_template)['body']
    return ((bodies and contents_body) or None), len_result
def get_related_contents(self, user):
    """Render the published contents crediting the current artist.

    Collects reviews (all IBaseReview subinterfaces), cultural events
    and film synopses whose contributors include ``self.context``, and
    returns ``(rendered_body_or_None, total_count)``.
    """
    interfaces = get_subinterfaces(IBaseReview)
    interfaces.extend([ICulturalEvent, IFilmSynopses])
    objects = find_entities(
        user=user,
        interfaces=interfaces,
        metadata_filter={'states': ['published']},
        contribution_filter={'artists_ids': [self.context]},
        include_site=True,
        sort_on='release_date', reverse=True)
    # list(objects) materializes the lazy result set for Batch; the
    # original copy-comprehension did the same work less idiomatically.
    batch = Batch(list(objects), self.request,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"
    len_result = batch.seqlen
    result_body = []
    for obj in batch:
        render_dict = {'object': obj,
                       'current_user': user,
                       'state': get_states_mapping(
                           user, obj,
                           getattr(obj, 'state_or_none', [None])[0])}
        body = self.content(args=render_dict,
                            template=obj.templates['default'])['body']
        result_body.append(body)
    values = {'bodies': result_body, 'batch': batch}
    contents_body = self.content(
        args=values, template=self.related_contents_template)['body']
    return ((result_body and contents_body) or None), len_result
def clean_artists_duplicates(root, registry):
    """Evolve step: merge duplicate artist sheets that have no author.

    For every artist still attached to the tree and lacking an author,
    find its duplicates, keep one canonical record (preferring a
    published one) and replace/remove the others from ``root.artists``.
    Commits every 1000 artists to keep the transaction small.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IArtistInformationSheet
    from lac.utilities.duplicates_utility import (
        find_duplicates_artist)
    contents = find_entities(interfaces=[IArtistInformationSheet])
    len_entities = str(len(contents))
    for index, artist in enumerate(contents):
        # Only merge artists that are attached and not user-authored.
        if artist and artist.__parent__ and not artist.author:
            duplicates = find_duplicates_artist(artist)
            if duplicates:
                duplicates.append(artist)
                # Prefer a published duplicate as the record to keep.
                publisheds = [v for v in duplicates if 'published' in v.state]
                published = publisheds[0] if publisheds else artist
                duplicates.remove(published)
                for dup in duplicates:
                    replaced = dup.replace_by(published)
                    if replaced:
                        root.delfromproperty('artists', dup)
        # Periodic commit + progress log.
        if index % 1000 == 0:
            log.info("**** Commit ****")
            transaction.commit()
            log.info(str(index) + "/" + len_entities)
    log.info('Clean artists evolved.')
def reviwes_access_control_evolve(root, registry):
    """Evolve step: initialise ``source_site``/``access_control`` on reviews.

    Reviews imported from the 'wapi' site are bound to the 'Sortir Wapi'
    site folder, those with source_id 'sortir' to 'Sortir Lille'; all
    others keep their own source site (or 'all').

    NOTE(review): the function name ('reviwes') and the 'reviwe_*'
    locals are misspelled; kept as-is since evolve steps may be
    registered by name elsewhere — confirm before renaming.
    """
    from lac.views.filter import find_entities
    from lac.content.interface import IBaseReview
    from substanced.util import get_oid
    reviews = find_entities(interfaces=[IBaseReview])
    len_entities = str(len(reviews))
    # assumes exactly one 'Sortir Wapi' and one 'Sortir Lille' site
    # folder exist; an IndexError here means they are missing.
    wapis = [s for s in root.site_folders if s.title == 'Sortir Wapi']
    lilles = [s for s in root.site_folders if s.title == 'Sortir Lille']
    wapi_site = wapis[0]
    wapi_site_oid = get_oid(wapi_site)
    lille_site = lilles[0]
    lille_site_oid = get_oid(lille_site)
    for index, review in enumerate(reviews):
        reviwe_site = getattr(review, 'source_data', {}).get('site', None)
        reviwe_siteid = getattr(review, 'source_data', {}).get('source_id', None)
        if reviwe_site == 'wapi':
            review.source_site = wapi_site_oid
            review.access_control = PersistentList([wapi_site_oid])
            log.info("Wapi: "+str(index) + "/" + len_entities)
        elif reviwe_siteid == 'sortir':
            review.source_site = lille_site_oid
            review.access_control = PersistentList([lille_site_oid])
            #log.info("Lille: "+str(index) + "/" + len_entities)
        else:
            # Fall back to the review's own source site, or 'all'.
            source_site = review.source_site if review.source_site else 'all'
            review.access_control = PersistentList([source_site])
            #log.info("CrerationCulturelle: "+str(index) + "/" + len_entities)
        review.reindex()
    log.info('Review access_control evolved.')
def start(self, context, request, appstruct, **kw):
    """Merge/rename keyword *source* into *targets* across the site.

    Rewrites the root keyword tree, the trees embedded in smart-folder
    and site-folder filters, and the trees of every searchable entity
    tagged with one of the target keywords.
    """
    source = appstruct['source']
    targets = appstruct['targets']
    root = getSite()

    def rewrite_filters(holder):
        # Rewrite the keyword tree embedded in a folder's filters,
        # then persist and reindex the folder.
        filters = getattr(holder, 'filters', [])
        for filter_ in filters:
            tree = filter_.get('metadata_filter', {}).get('tree', None)
            if tree:
                new_tree = edit_keywords(targets, source, tree)
                if new_tree:
                    filter_['metadata_filter']['tree'] = new_tree
        holder.filters = PersistentList(filters)
        holder.reindex()

    new_root_tree = edit_keywords(targets, source, root.tree)
    if new_root_tree:
        root.tree = new_root_tree
    for folder in find_entities(interfaces=[ISmartFolder]):
        rewrite_filters(folder)
    tagged = find_entities(
        interfaces=[ISearchableEntity],
        keywords=[target.lower() for target in targets])
    for obj in tagged:
        new_tree = edit_keywords(targets, source, obj.tree)
        if new_tree:
            obj.tree = new_tree
            obj.reindex()
    for folder in find_entities(interfaces=[ISiteFolder]):
        new_tree = edit_keywords(targets, source, folder.tree)
        if new_tree:
            folder.tree = new_tree
        rewrite_filters(folder)
    return {}
def find_entities(self):
    """Quick-search: cultural events grouped by published smart folder.

    Returns ``{'body': rendered_html}``; an empty body when no query
    string was supplied.
    """
    name = self.params('q')
    if not name:
        return {'body': ''}
    root = getSite()
    user = get_current()
    site = get_site_folder(True)
    site_id = get_oid(site)
    folders = find_entities(interfaces=[ISmartFolder],
                            metadata_filter={'states': ['published']},
                            force_local_control=True)
    top_folders = [sf for sf in folders if not sf.parents]
    results = []
    for folder in top_folders:
        if is_all_values_key(name):
            # Wildcard query: list without text filtering.
            objects = get_folder_content(
                folder, user,
                ignore_end_date=True,
                sort_on=None,
                metadata_filter={
                    'content_types': ['cultural_event'],
                    'states': ['published']
                })
        else:
            objects = get_folder_content(
                folder, user,
                sort_on='relevant_data',
                metadata_filter={
                    'content_types': ['cultural_event'],
                    'states': ['published']
                },
                text_filter={'text_to_search': name},
                ignore_end_date=True)
        if objects:
            # Keep at most 5 hits per folder for the quick view.
            results.append({'folder': folder,
                            'objects': list(objects)[:5]})
    results.sort(key=lambda entry: entry['folder'].get_order(site_id))
    values = {
        'folders': results,
        'all_url': self.request.resource_url(
            root, '@@search_result',
            query={'text_to_search': name}),
        'advenced_search_url': self.request.resource_url(
            root, '@@advanced_search')
    }
    body = self.content(args=values, template=self.search_template)['body']
    return {'body': body}
def group_choice(node, kw):
    """Build a multi-select widget of all groups visible to the user,
    sorted by title."""
    current_user = get_current()
    groups = find_entities(current_user, interfaces=[IGroup])
    choices = sorted(((group, group.title) for group in groups),
                     key=lambda pair: pair[1])
    return Select2Widget(values=choices, multiple=True)
def update(self):
    """Render the contents having duplicates, alongside the filter form."""
    self.execute(None)
    user = get_current()
    filter_form, filter_data = self._add_filter(user)
    default_types = ['cultural_event', 'venue', 'artist']
    args = {'metadata_filter': {'content_types': default_types}}
    args = merge_with_filter_view(self, args)
    args['request'] = self.request
    # Re-apply the default content types if the filter view cleared them.
    if not args.get('metadata_filter', {}).get('content_types', []):
        metadata_filter = args.get('metadata_filter', {})
        metadata_filter['content_types'] = default_types
        args['metadata_filter'] = metadata_filter
    args['metadata_filter']['states'] = ['published']
    objects = find_entities(user=user,
                            sort_on='object_title',
                            reverse=False,
                            include_site=True,
                            **args)
    url = self.request.resource_url(self.context, 'allduplicates')

    def has_duplicates(obj):
        """Batch condition: keep only objects with duplicates."""
        return obj.get_duplicates()

    batch = ConditionalBatch(has_duplicates, objects, self.request,
                             url=url, default_size=15)
    batch.target = "#results_contents"
    len_result = batch.seqlen
    # Pick the singular/plural title variant.
    index = '*' if len_result > 1 else str(len_result)
    self.title = _(CONTENTS_MESSAGES[index],
                   mapping={'nember': len_result})
    filter_data['filter_message'] = self.title
    filter_body = self.filter_instance.get_body(filter_data)
    bodies = []
    for obj in batch:
        state = get_states_mapping(
            user, obj, getattr(obj, 'state_or_none', [None])[0])
        bodies.append(self.content(
            args={'object': obj, 'current_user': user, 'state': state},
            template=obj.templates['duplicates'])['body'])
    values = {'bodies': bodies,
              'batch': batch,
              'filter_body': filter_body}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    return {'coordinates': {self.coordinates: [item]},
            'css_links': filter_form['css_links'],
            'js_links': filter_form['js_links']}
def update(self):
    """Render the latest contents matching the current search criteria.

    Search parameters come either from a previously validated form
    (``self.validated``) or are re-validated from the posted GET/POST
    data via ``SearchView``; posted data is cleared afterwards so other
    panels (e.g. the user menu) do not re-consume it.
    """
    self.execute(None)
    user = get_current()
    validated = getattr(self, 'validated', {})
    posted = self.request.POST or self.request.GET or {}
    # Copy BEFORE any clearing below so the batch URL keeps the query.
    posted = posted.copy()
    clear_posted = False
    if not validated:
        if posted:
            clear_posted = True
        searcinstance = SearchView(self.context, self.request,
                                   filter_result=True)
        if searcinstance.validated:
            validated = searcinstance.validated
    objects = find_entities(
        user=user,
        sort_on='release_date',
        reverse=True,
        include_site=True,
        **validated)
    url = self.request.resource_url(
        self.context, self.request.view_name, query=posted)
    batch = Batch(objects, self.request,
                  default_size=core.BATCH_DEFAULT_SIZE, url=url)
    #clear posted values: See usermenu panel
    if clear_posted:
        if self.request.POST:
            self.request.POST.clear()
        elif self.request.GET:
            self.request.GET.clear()
    batch.target = "#results_contents"
    len_result = batch.seqlen
    # Pick the singular/plural title variant.
    index = str(len_result)
    if len_result > 1:
        index = '*'
    self.title = _(CONTENTS_MESSAGES[index],
                   mapping={'nember': len_result})
    result_body = []
    for obj in batch:
        render_dict = {'object': obj,
                       'current_user': user,
                       'state': get_states_mapping(
                           user, obj,
                           getattr(obj, 'state_or_none', [None])[0])}
        body = self.content(args=render_dict,
                            template=obj.templates['default'])['body']
        result_body.append(body)
    result = {}
    values = {'bodies': result_body, 'batch': batch}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def artist_hash_data_evolve(root, registry):
    """Evolve step: recompute the data hash of every artist sheet."""
    from lac.views.filter import find_entities
    from lac.content.interface import IArtistInformationSheet
    for artist in find_entities(interfaces=[IArtistInformationSheet]):
        artist.hash_artist_data()
    log.info('Artists presentation text evolved.')
def venue_hash_data_evolve(root, registry):
    """Evolve step: recompute the data hash of every venue."""
    from lac.views.filter import find_entities
    from lac.content.interface import IVenue
    for venue in find_entities(interfaces=[IVenue]):
        venue.hash_venue_data()
    log.info('Venues presentation text evolved.')
def bind(self):
    """Provide the ordered top-level smart folders for the template."""
    user = get_current()
    site = get_site_folder(True)
    site_oid = get_oid(site)
    smart_folders = find_entities(
        user=user,
        interfaces=[ISmartFolder],
        force_local_control=True)
    # Keep only root folders, ordered per the current site.
    top_level = [folder for folder in smart_folders if not folder.parents]
    top_level.sort(key=lambda folder: folder.get_order(site_oid))
    return {'folders': top_level}
def update(self):
    """Render the batched list of contents to moderate, with filters."""
    self.execute(None)
    user = get_current()
    filter_form, filter_data = self._add_filter(user)
    args = {'metadata_filter': {
        'content_types': list(core.SEARCHABLE_CONTENTS.keys())}}
    args = merge_with_filter_view(self, args)
    args['request'] = self.request
    objects = find_entities(user=user,
                            sort_on='release_date',
                            reverse=True,
                            include_site=True,
                            **args)
    url = self.request.resource_url(self.context, 'contentstomoderate')
    batch = Batch(objects, self.request, url=url,
                  default_size=core.BATCH_DEFAULT_SIZE)
    batch.target = "#results_contents"
    len_result = batch.seqlen
    # Pick the singular/plural title variant.
    index = '*' if len_result > 1 else str(len_result)
    self.title = _(CONTENTS_MESSAGES[index],
                   mapping={'nember': len_result})
    filter_data['filter_message'] = self.title
    filter_body = self.filter_instance.get_body(filter_data)
    bodies = []
    for obj in batch:
        state = get_states_mapping(
            user, obj, getattr(obj, 'state_or_none', [None])[0])
        bodies.append(self.content(
            args={'object': obj, 'current_user': user, 'state': state},
            template=obj.templates['default'])['body'])
    values = {'bodies': bodies,
              'batch': batch,
              'filter_body': filter_body}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    return {'coordinates': {self.coordinates: [item]},
            'css_links': filter_form['css_links'],
            'js_links': filter_form['js_links']}