def alert(kind="", senders=None, recipients=None, data=None):
    """Dispatch an alert of the given kind to its registered handler.

    Looks up ``kind`` in the module-level ``ALERTS`` registry and, when a
    handler exists, calls it with ``senders``, ``recipients`` and ``data``.
    Logs a warning and returns None for unknown kinds.

    Fix: the original used mutable default arguments ([] and {}), which are
    shared across calls and across callers that mutate them. Replaced with
    None sentinels resolved inside the function; behavior is otherwise
    unchanged and explicit callers are unaffected.
    """
    senders = [] if senders is None else senders
    recipients = [] if recipients is None else recipients
    data = {} if data is None else data
    alert_op = ALERTS.get(kind, None)
    if alert_op:
        return alert_op(senders, recipients, data)
    log.warning("Alert kind {kind} not implemented".format(kind=kind))
    return None
def get_schedules(schedules_str, venue, dates):
    """Parse a blank-line-separated text into FilmSchedule objects.

    Each block of consecutive non-empty lines becomes one schedule: the
    first line is the title, the remaining lines the description. Every
    created object gets the given venue and dates and is marked published.
    Blocks that fail to build are logged and skipped (best effort).
    """
    # Split into blocks; the sentinel '' flushes the last open block.
    blocks = []
    current = []
    for raw_line in schedules_str.replace('\r', '').split('\n') + ['']:
        if raw_line:
            current.append(raw_line)
        elif current:
            blocks.append(current)
            current = []
    # Preserve order: downstream consumes blocks last-first, so reverse.
    result = []
    for block in reversed(blocks):
        try:
            schedule_obj = FilmSchedule(
                title=block[0],
                description='\n'.join(block[1:]),
                dates=dates)
            schedule_obj.setproperty('venue', venue)
            schedule_obj.state = PersistentList(['published'])
            result.append(schedule_obj)
        except Exception as error:
            log.warning(error)
    return result
def generate_images(fp, filename, dimension):
    """Produce one cropped/rotated thumbnail per entry of IMAGES_FORMATS.

    ``fp`` is a readable file-like object holding the source image,
    ``filename`` is only used for its extension (to pick the save format),
    and ``dimension`` is a dict of crop parameters: x, y, r (rotation in
    degrees), area_height and area_width, all defaulting to 0.

    Returns a list of dicts: each is a copy of an IMAGES_FORMATS entry with
    an added 'fp' key holding a BytesIO of the rendered image. On the first
    read/decode failure the partial list built so far is returned.
    """
    x = dimension.get('x', 0)
    y = dimension.get('y', 0)
    deg = dimension.get('r', 0)
    area_height = dimension.get('area_height', 0)
    area_width = dimension.get('area_width', 0)
    result = []
    for img_format in IMAGES_FORMATS:
        try:
            # NOTE(review): fp is re-opened on every iteration; this relies
            # on PIL seeking fp back to 0 itself — confirm with the Pillow
            # version in use.
            img = Image.open(fp)
            if img.mode == 'P':
                # Palette images must be RGB before crop/save as JPEG.
                img = img.convert('RGB')
        except OSError as e:
            # Unreadable/undecodable image: give up, keep what we have.
            log.warning(e)
            return result
        height = img.size[1]
        width = img.size[0]
        size = img_format['size']
        left, upper, right, lower = _get_coordinates(
            height, width, area_height, area_width, x, y, size[1], size[0])
        img = img.rotate(deg).crop((left, upper, right, lower))
        # NOTE(review): Image.ANTIALIAS is deprecated in recent Pillow
        # (use Image.LANCZOS) — verify against the pinned PIL version.
        img.thumbnail(size, Image.ANTIALIAS)
        buf = io.BytesIO()
        ext = os.path.splitext(filename)[1].lower()
        # Save using the format mapped from the extension, JPEG by default.
        img.save(buf, Image.EXTENSION.get(ext, 'jpeg'))
        buf.seek(0)
        img_data = img_format.copy()
        img_data['fp'] = buf
        result.append(img_data)
    return result
def update(self):
    """Execute the action for the given source/target oids, then redirect.

    Reads 'source' and 'targets' request parameters (oids), resolves them
    to objects and runs ``self.execute``. On an error result a failure view
    is returned; on success the browser is redirected to the source's index
    view. Any exception (or missing parameters) falls back to a redirect to
    the current context.
    """
    source_param = self.params('source')
    targets_param = self.params('targets')
    if targets_param and source_param:
        try:
            # A single oid arrives as a scalar: normalize to a list.
            if not isinstance(targets_param, (list, tuple)):
                targets_param = [targets_param]
            resolved_targets = [get_obj(int(oid)) for oid in targets_param]
            resolved_source = get_obj(int(source_param))
            if resolved_targets and resolved_source:
                outcome = self.execute(
                    {'source': resolved_source, 'targets': resolved_targets})
                if outcome and outcome[0].get('error', False):
                    error_view = ViewError()
                    error_view.principalmessage = _("An error has occurred.")
                    return self.failure(error_view)
                return HTTPFound(
                    self.request.resource_url(resolved_source, '@@index'))
        except Exception as error:
            log.warning(error)
    return HTTPFound(self.request.resource_url(self.context, ''))
def start(self, context, request, appstruct, **kw):
    """Generate the schedules-extraction ODT file from the form data.

    Builds FilmSchedule objects from the submitted venues, chains the venue
    and city classifications, renders them into an ODT via a transient
    smart folder, then aborts the transaction so none of the temporary
    objects persist. Returns {'odtfile': <BytesIO or extract result>};
    failures are logged and yield an empty BytesIO.
    """
    odtfile = io.BytesIO()
    try:
        if appstruct:
            fr_date = dates_to_fr_date(appstruct['next_date'])
            schedule_objects = []
            for venue_entry in appstruct['venues']:
                schedule_objects.extend(get_schedules(
                    venue_entry['schedules'], venue_entry['title'], fr_date))
            # Chain classifications: city wraps venue.
            chained = None
            for classification_factory in (
                    CLASSIFICATIONS['venue_classification'],
                    CLASSIFICATIONS['city_classification']):
                chained = classification_factory(chained)
            smart_folder = generate_search_smart_folder('Extraction folder')
            smart_folder.classifications = chained
            odtfile = smart_folder.classifications.extract(
                schedule_objects, request, smart_folder,
                template_type="extraction")
            # Discard the transient folder/schedule objects.
            transaction.abort()
    except Exception as error:
        log.warning(error)
    return {'odtfile': odtfile}
def alert(kind="", senders=None, recipients=None, data=None):
    """Dispatch an alert of the given kind to its registered handler.

    Looks up ``kind`` in the module-level ``ALERTS`` registry and, when a
    handler exists, calls it with ``senders``, ``recipients`` and ``data``.
    Logs a warning and returns None for unknown kinds.

    Fix: the original used mutable default arguments ([] and {}), which are
    shared across calls and across callers that mutate them. Replaced with
    None sentinels resolved inside the function; behavior is otherwise
    unchanged and explicit callers are unaffected.
    """
    senders = [] if senders is None else senders
    recipients = [] if recipients is None else recipients
    data = {} if data is None else data
    alert_op = ALERTS.get(kind, None)
    if alert_op:
        return alert_op(senders, recipients, data)
    log.warning("Alert kind {kind} not implemented".format(kind=kind))
    return None
def update(self):
    """Execute the action and render its result into view coordinates.

    Resolves the 'source' and 'targets' oid parameters, runs
    ``self.execute`` and, when a result comes back, renders it with the
    view template and returns a {'coordinates': ...} mapping. Any failure
    (missing params, resolution error, empty result, exception) returns the
    standard failure view.
    """
    src_oid = self.params('source')
    tgt_oids = self.params('targets')
    if tgt_oids and src_oid:
        try:
            # A single oid arrives as a scalar: normalize to a list.
            if not isinstance(tgt_oids, (list, tuple)):
                tgt_oids = [tgt_oids]
            resolved = [get_obj(int(oid)) for oid in tgt_oids]
            src = get_obj(int(src_oid))
            if resolved and src:
                outcome = self.execute({'source': src, 'targets': resolved})
                if outcome:
                    args = outcome[0]
                    args['context'] = src
                    rendered = self.content(
                        args=args, template=self.template)['body']
                    item = self.adapt_item(rendered, self.viewid)
                    return {'coordinates': {self.coordinates: [item]}}
        except Exception as error:
            log.warning(error)
    failure_view = ViewError()
    failure_view.principalmessage = _("An error has occurred.")
    return self.failure(failure_view)
def update(self):
    """Run the source/targets action and redirect to the source's index.

    'source' and 'targets' request parameters are oids; both must be
    present. After a successful ``self.execute`` the user is redirected to
    the source's '@@index' view; an error result yields a failure view.
    Exceptions are logged and, like missing parameters, fall through to a
    redirect to the current context.
    """
    src_oid = self.params('source')
    tgt_oids = self.params('targets')
    if tgt_oids and src_oid:
        try:
            if not isinstance(tgt_oids, (list, tuple)):
                # Scalar oid: wrap so the comprehension below works.
                tgt_oids = [tgt_oids]
            tgts = [get_obj(int(t)) for t in tgt_oids]
            src = get_obj(int(src_oid))
            if tgts and src:
                exec_result = self.execute({'source': src, 'targets': tgts})
                has_error = bool(exec_result) and exec_result[0].get(
                    'error', False)
                if has_error:
                    err = ViewError()
                    err.principalmessage = _("An error has occurred.")
                    return self.failure(err)
                return HTTPFound(
                    self.request.resource_url(src, '@@index'))
        except Exception as error:
            log.warning(error)
    return HTTPFound(self.request.resource_url(self.context, ''))
def start(self, context, request, appstruct, **kw):
    """Produce the extraction ODT from the submitted venue schedules.

    For each venue in ``appstruct['venues']`` the schedules text is parsed
    into FilmSchedule objects, which are then extracted through a chained
    venue+city classification on a throwaway smart folder. The transaction
    is aborted so nothing is persisted. Returns {'odtfile': ...}; errors
    are logged and leave an empty BytesIO.
    """
    odtfile = io.BytesIO()
    try:
        if appstruct:
            next_date_fr = dates_to_fr_date(appstruct['next_date'])
            all_schedules = []
            for venue_data in appstruct['venues']:
                parsed = get_schedules(
                    venue_data['schedules'], venue_data['title'],
                    next_date_fr)
                all_schedules.extend(parsed)
            # Build the classification chain (venue, then city on top).
            source_class = None
            for cls in (CLASSIFICATIONS['venue_classification'],
                        CLASSIFICATIONS['city_classification']):
                source_class = cls(source_class)
            folder = generate_search_smart_folder('Extraction folder')
            folder.classifications = source_class
            odtfile = folder.classifications.extract(
                all_schedules, request, folder, template_type="extraction")
            # Roll back: the folder and schedules are temporary.
            transaction.abort()
    except Exception as error:
        log.warning(error)
    return {'odtfile': odtfile}
def generate_images(fp, filename, dimension):
    """Produce one cropped/rotated thumbnail per entry of IMAGES_FORMATS.

    ``fp`` is a readable file-like object holding the source image,
    ``filename`` is only used for its extension (to pick the save format),
    and ``dimension`` is a dict of crop parameters: x, y, r (rotation in
    degrees), area_height and area_width, all defaulting to 0.

    Returns a list of dicts: each is a copy of an IMAGES_FORMATS entry with
    an added 'fp' key holding a BytesIO of the rendered image. On the first
    read/decode failure the partial list built so far is returned.
    """
    x = dimension.get('x', 0)
    y = dimension.get('y', 0)
    deg = dimension.get('r', 0)
    area_height = dimension.get('area_height', 0)
    area_width = dimension.get('area_width', 0)
    result = []
    for img_format in IMAGES_FORMATS:
        try:
            # NOTE(review): fp is re-opened on every iteration; this relies
            # on PIL seeking fp back to 0 itself — confirm with the Pillow
            # version in use.
            img = Image.open(fp)
            if img.mode == 'P':
                # Palette images must be RGB before crop/save as JPEG.
                img = img.convert('RGB')
        except OSError as e:
            # Unreadable/undecodable image: give up, keep what we have.
            log.warning(e)
            return result
        height = img.size[1]
        width = img.size[0]
        size = img_format['size']
        left, upper, right, lower = _get_coordinates(
            height, width, area_height, area_width, x, y, size[1], size[0])
        img = img.rotate(deg).crop((left, upper, right, lower))
        # NOTE(review): Image.ANTIALIAS is deprecated in recent Pillow
        # (use Image.LANCZOS) — verify against the pinned PIL version.
        img.thumbnail(size, Image.ANTIALIAS)
        buf = io.BytesIO()
        ext = os.path.splitext(filename)[1].lower()
        # Save using the format mapped from the extension, JPEG by default.
        img.save(buf, Image.EXTENSION.get(ext, 'jpeg'))
        buf.seek(0)
        img_data = img_format.copy()
        img_data['fp'] = buf
        result.append(img_data)
    return result
def get_content_template(self):
    """Return the content template's text, or '' when absent/unreadable.

    Reads the whole template file object and decodes it as text. Read or
    decode failures are logged and treated like a missing template.
    """
    template = self.content_template
    if not template:
        return ''
    try:
        return template.fp.readall().decode()
    except Exception as error:
        log.warning(error)
    return ''
def get_content_template(self):
    """Return the content template's text, or '' when absent/unreadable.

    Reads the whole template file object and decodes it as text. Read or
    decode failures are logged and treated like a missing template.
    """
    template = self.content_template
    if not template:
        return ''
    try:
        return template.fp.readall().decode()
    except Exception as error:
        log.warning(error)
    return ''
def remove_entry(self, key, id):
    """Best-effort delete of the document ``id`` of type ``key``.

    Deletes from the 'lac' index with an immediate refresh; any failure
    (connectivity, missing document, ...) is only logged.
    """
    try:
        es.delete(index='lac', doc_type=key, id=id, refresh=True)
    except Exception as e:
        log.warning(e)
def title_getter(id):
    """Resolve an oid to its object's title; fall back to the oid itself.

    Any resolution failure (non-numeric id, lookup error, ...) is logged
    and the original ``id`` is returned unchanged.
    """
    try:
        target = get_obj(int(id), None)
        return target.title if target else id
    except Exception as e:
        log.warning(e)
        return id
def title_getter(id):
    """Resolve an oid to its object's title; fall back to the oid itself.

    Any resolution failure (non-numeric id, lookup error, ...) is logged
    and the original ``id`` is returned unchanged.
    """
    try:
        target = get_obj(int(id), None)
        return target.title if target else id
    except Exception as e:
        log.warning(e)
        return id
def get_entry(self, id):
    """Fetch one document from the 'lac' index and return its '_source'.

    Returns None when the lookup fails (logged) or yields nothing.
    """
    try:
        response = es.get(index='lac', id=id)
    except Exception as e:
        log.warning(e)
        return None
    return response['_source'] if response else None
def title_getter(obj):
    """Return a display title for ``obj``.

    Non-string input is assumed to be an object: its ``title`` attribute
    is returned when present, otherwise the object itself. A string is
    treated as an oid and resolved via ``get_obj``; on any failure the
    original string is logged about and returned unchanged.
    """
    if not isinstance(obj, str):
        return getattr(obj, 'title', obj)
    try:
        resolved = get_obj(int(obj), None)
        return resolved.title if resolved else obj
    except Exception as e:
        log.warning(e)
        return obj
def update(self):
    """Execute the newsletter action for the 'oid'/'user' parameters.

    Resolves the 'oid' parameter to the newsletter object and runs
    ``self.execute``, returning its first result. Missing oid or any
    failure (logged) redirects to the site root.
    """
    oid = self.params('oid')
    if oid:
        try:
            newsletter = get_obj(int(oid))
            outcome = self.execute(
                {'newsletter': newsletter, 'user': self.params('user')})
            return outcome[0]
        except Exception as error:
            log.warning(error)
    return HTTPFound(self.request.resource_url(getSite(), ""))
def title_getter(obj):
    """Return a display title for ``obj``.

    Non-string input is assumed to be an object: its ``title`` attribute
    is returned when present, otherwise the object itself. A string is
    treated as an oid and resolved via ``get_obj``; on any failure the
    original string is logged about and returned unchanged.
    """
    if not isinstance(obj, str):
        return getattr(obj, 'title', obj)
    try:
        resolved = get_obj(int(obj), None)
        return resolved.title if resolved else obj
    except Exception as e:
        log.warning(e)
        return obj
def start(self, context, request, appstruct, **kw):
    """Advance publication settings for site folders past their closing date.

    For every site folder with a ``closing_date``, if now (end of today,
    UTC) has reached it, the publication number is bumped and the closing
    date is pushed forward by the site's closing frequency (in days).
    Returns an empty dict.
    """
    root = getSite()
    sites = root.site_folders
    # "now" = today at 23:59:59 UTC, so a closing date any time today counts.
    now = datetime.datetime.combine(
        datetime.datetime.utcnow(),
        datetime.time(23, 59, 59, tzinfo=pytz.UTC))
    for site in sites:
        #TODO Synchronize Publish Settings
        if hasattr(site, 'closing_date'):
            # Stored closing_date is naive; interpret it as UTC.
            closing_date = site.closing_date.replace(tzinfo=pytz.UTC)
            if now >= closing_date:
                site.publication_number += 1
                site.closing_date += datetime.timedelta(
                    days=site.closing_frequence)
                # NOTE(review): warning marks the not-yet-implemented
                # synchronization step (see TODO above) — confirm intent.
                log.warning('Synchronize Publish Settings')
    return {}
def start(self, context, request, appstruct, **kw):
    """Advance publication settings for site folders past their closing date.

    For every site folder with a ``closing_date``, if now (end of today,
    UTC) has reached it, the publication number is bumped and the closing
    date is pushed forward by the site's closing frequency (in days).
    Returns an empty dict.
    """
    root = getSite()
    sites = root.site_folders
    # "now" = today at 23:59:59 UTC, so a closing date any time today counts.
    now = datetime.datetime.combine(
        datetime.datetime.utcnow(),
        datetime.time(23, 59, 59, tzinfo=pytz.UTC))
    for site in sites:
        #TODO Synchronize Publish Settings
        if hasattr(site, 'closing_date'):
            # Stored closing_date is naive; interpret it as UTC.
            closing_date = site.closing_date.replace(tzinfo=pytz.UTC)
            if now >= closing_date:
                site.publication_number += 1
                site.closing_date += datetime.timedelta(
                    days=site.closing_frequence)
                # NOTE(review): warning marks the not-yet-implemented
                # synchronization step (see TODO above) — confirm intent.
                log.warning('Synchronize Publish Settings')
    return {}
def start(self, context, request, appstruct, **kw):
    """Load entities from the selected json file"""
    root = getSite()
    current_site = get_site_folder(True, request)
    entities = appstruct.get('entities')
    entities_file = entities['_object_data'].fp
    entities_str = entities_file.readall().decode('utf8')
    # Parse the uploaded JSON into a list of entity dicts.
    entities_list = json.loads(entities_str)
    # Access control: the sites on which the entities will be displayed.
    new_entities, current_objects = get_new_entities(entities_list)
    # NOTE(review): list(None) raises TypeError if 'access_control' is
    # absent — the form schema presumably always supplies it; confirm.
    access_control = list(appstruct.get('access_control', None))
    access_control = get_application_sites(access_control, root,
                                           current_site)
    # Extend access control on entities that already exist.
    for object_ in current_objects:
        obj_access_control = getattr(object_, 'access_control', [])
        if obj_access_control and 'all' not in obj_access_control:
            obj_access_control.extend([get_oid(s) for s in access_control])
            # set() removes duplicate oids before persisting.
            object_.access_control = PersistentList(
                set(obj_access_control))
            object_.reindex()
    len_entities = str(len(new_entities))
    for index, entity in enumerate(new_entities):
        # The 'type' key selects the factory and is consumed here.
        entity_type = entity.pop('type')
        factory = FACTORIES.get(entity_type, None)
        if factory:
            # Create the entity in the 'published' state (best effort:
            # individual failures are logged and skipped).
            try:
                factory(entity, 'published', root,
                        access_control=access_control)
                log.info(str(index) + "/" + len_entities)
            except Exception as error:
                log.warning(error)
            # Commit periodically to bound transaction size.
            if index % NB_AFTER_COMMIT == 0:
                log.info("**** Commit ****")
                transaction.commit()
    return {}
def get_entries(self, key=None, query=None, params=None, sort=None,
                fields=None):
    """Search the 'lac' index and return (hits, total).

    ``key`` is the doc_type to restrict to (None = all), ``query`` the ES
    query clause (defaults to match_all), ``params`` extra request
    parameters, ``sort`` the sort clause and ``fields`` an optional field
    whitelist added to the body. On failure the error is logged and
    (None, 0) is returned.

    Fix: the original declared mutable default arguments
    (``query={"match_all": {}}``, ``params={}``, ``sort={}``,
    ``fields=[]``), which are shared across every call and would leak any
    caller mutation. Replaced with None sentinels; effective defaults and
    behavior are unchanged.
    """
    if query is None:
        query = {"match_all": {}}
    if params is None:
        params = {}
    if sort is None:
        sort = {}
    if fields is None:
        fields = []
    try:
        body = {'query': query,
                'sort': sort}
        if fields:
            body['fields'] = fields
        results = es.search(index='lac', doc_type=key,
                            params=params, body=body)
    except Exception as e:
        log.warning(e)
        return None, 0
    total = results['hits']['total']
    return results['hits']['hits'], total
def start(self, context, request, appstruct, **kw):
    """Load entities from the selected json file"""
    root = getSite()
    current_site = get_site_folder(True, request)
    entities = appstruct.get('entities')
    entities_file = entities['_object_data'].fp
    entities_str = entities_file.readall().decode('utf8')
    # Parse the uploaded JSON into a list of entity dicts.
    entities_list = json.loads(entities_str)
    # Access control: the sites on which the entities will be displayed.
    new_entities, current_objects = get_new_entities(entities_list)
    # NOTE(review): list(None) raises TypeError if 'access_control' is
    # absent — the form schema presumably always supplies it; confirm.
    access_control = list(appstruct.get('access_control', None))
    access_control = get_application_sites(
        access_control, root, current_site)
    # Extend access control on entities that already exist.
    for object_ in current_objects:
        obj_access_control = getattr(object_, 'access_control', [])
        if obj_access_control and 'all' not in obj_access_control:
            obj_access_control.extend([get_oid(s) for s in access_control])
            # set() removes duplicate oids before persisting.
            object_.access_control = PersistentList(set(obj_access_control))
            object_.reindex()
    len_entities = str(len(new_entities))
    for index, entity in enumerate(new_entities):
        # The 'type' key selects the factory and is consumed here.
        entity_type = entity.pop('type')
        factory = FACTORIES.get(entity_type, None)
        if factory:
            # Create the entity in the 'published' state (best effort:
            # individual failures are logged and skipped).
            try:
                factory(entity, 'published', root,
                        access_control=access_control)
                log.info(str(index) + "/" + len_entities)
            except Exception as error:
                log.warning(error)
            # Commit periodically to bound transaction size.
            if index % NB_AFTER_COMMIT == 0:
                log.info("**** Commit ****")
                transaction.commit()
    return {}
def create_index(self):
    """Create the 'lac' Elasticsearch index if it does not exist.

    Probes for the index, retrying once after a short pause in case
    Elasticsearch is still starting up. Returns False when the index
    exists or was created, True when Elasticsearch could not be reached
    at all (the error is logged).
    """
    try:
        try:
            index_present = es.indices.exists('lac')
        except Exception:
            # ES may still be booting: wait, then probe a second time.
            time.sleep(5)
            index_present = es.indices.exists('lac')
        if not index_present:
            settings = {'settings': {'number_of_replicas': 0,
                                     'number_of_shards': 1,
                                     }}
            # ignore=400 swallows "index already exists"
            # (RequestError/IndexAlreadyExistsException) races.
            es.indices.create(index='lac', body=settings, ignore=400)
        return False
    except Exception as e:
        log.warning(e)
        return True
def update(self):
    """Import entities from a remote JSON feed and render the view.

    The 'source' parameter is a URL serving a JSON list of entities (see
    services: digitick, ...). New entities are created in the 'submitted'
    state via the registered factories, committing periodically. Always
    returns the rendered view coordinates.
    """
    root = getSite()
    valid_sites = get_valid_sites(root)
    # The source url: a json file (see services: digitick...).
    source = self.params('source')
    if source:
        # NOTE(review): no timeout/error handling on the fetch — a bad
        # source URL would propagate out of this view; confirm acceptable.
        entities_file = urllib.request.urlopen(source)
        entities_str = entities_file.read().decode('utf8')
        # Parse the json file (all entities).
        all_imported_entities = json.loads(entities_str)
        # Keep only entities not already present.
        entities_to_import, current_objects = get_new_entities(
            all_imported_entities)
        len_entities = str(len(entities_to_import))
        for index, entity in enumerate(entities_to_import):
            # The 'type' key selects the factory and is consumed here.
            entity_type = entity.pop('type')
            factory = FACTORIES.get(entity_type, None)
            if factory:
                # Create the entity in the 'submitted' state (best effort:
                # individual failures are logged and skipped).
                try:
                    factory(entity, 'submitted', root, sites=valid_sites)
                    log.info(str(index) + "/" + len_entities)
                except Exception as error:
                    log.warning(error)
                # Commit periodically to bound transaction size.
                if index % NB_AFTER_COMMIT == 0:
                    log.info("**** Commit ****")
                    transaction.commit()
    result = {}
    values = {'context': self.context}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def update(self):
    """Import entities from a remote JSON feed and render the view.

    The 'source' parameter is a URL serving a JSON list of entities (see
    services: digitick, ...). New entities are created in the 'submitted'
    state via the registered factories, committing periodically. Always
    returns the rendered view coordinates.
    """
    root = getSite()
    valid_sites = get_valid_sites(root)
    # The source url: a json file (see services: digitick...).
    source = self.params('source')
    if source:
        # NOTE(review): no timeout/error handling on the fetch — a bad
        # source URL would propagate out of this view; confirm acceptable.
        entities_file = urllib.request.urlopen(source)
        entities_str = entities_file.read().decode('utf8')
        # Parse the json file (all entities).
        all_imported_entities = json.loads(entities_str)
        # Keep only entities not already present.
        entities_to_import, current_objects = get_new_entities(
            all_imported_entities)
        len_entities = str(len(entities_to_import))
        for index, entity in enumerate(entities_to_import):
            # The 'type' key selects the factory and is consumed here.
            entity_type = entity.pop('type')
            factory = FACTORIES.get(entity_type, None)
            if factory:
                # Create the entity in the 'submitted' state (best effort:
                # individual failures are logged and skipped).
                try:
                    factory(entity, 'submitted', root, sites=valid_sites)
                    log.info(str(index) + "/" + len_entities)
                except Exception as error:
                    log.warning(error)
                # Commit periodically to bound transaction size.
                if index % NB_AFTER_COMMIT == 0:
                    log.info("**** Commit ****")
                    transaction.commit()
    result = {}
    values = {'context': self.context}
    body = self.content(args=values, template=self.template)['body']
    item = self.adapt_item(body, self.viewid)
    result['coordinates'] = {self.coordinates: [item]}
    return result
def validate_file_content(node, appstruct, width, height):
    """Colander-style validator: reject pictures larger than width x height.

    ``appstruct['picture']`` is a file upload dict with 'mimetype' and
    'fp'. Images are measured with PIL; Flash (.swf) files via the swf
    header parser. Oversized content raises ``colander.Invalid``; other
    mimetypes pass silently.
    """
    if appstruct['picture']:
        mimetype = appstruct['picture']['mimetype']
        # Prefer the underlying raw buffer when the fp wraps one.
        file_value = getattr(appstruct['picture']['fp'], 'raw',
                             appstruct['picture']['fp'])
        if mimetype.startswith('image'):
            # Rewind defensively; some file objects may not be seekable.
            try:
                file_value.seek(0)
            except Exception as e:
                log.warning(e)
            img = Image.open(file_value)
            img_width = img.size[0]
            img_height = img.size[1]
            # Leave the stream rewound for downstream consumers.
            file_value.seek(0)
            if img_width > width or img_height > height:
                # NOTE(review): the doubled translation call _(_(...))
                # looks accidental but is preserved — confirm intent.
                raise colander.Invalid(node, _
                    (_('The image size is not valid: the allowed size is ${width} x ${height} px.',
                       mapping={'width': width, 'height': height})))
        if mimetype.startswith('application/x-shockwave-flash'):
            try:
                file_value.seek(0)
            except Exception as e:
                log.warning(e)
            # parse() reads the swf header (dimensions in ['width'/'height']).
            header = parse(file_value)
            file_value.seek(0)
            flash_width = header['width']
            flash_height = header['height']
            if flash_width > width or flash_height > height:
                raise colander.Invalid(node, _
                    (_('The flash animation size is not valid: the allowed size is ${width} x ${height} px.',
                       mapping={'width': width, 'height': height})))
def remove_index(self):
    """Drop the entire 'lac' index; a missing index is not an error."""
    try:
        # ignore 400/404: the index may already be gone
        # (NotFoundError / IndexMissingException).
        es.indices.delete(index='lac', ignore=[400, 404])
    except Exception as e:
        log.warning(e)