def _update(self, provider):
    """Scan the configured folder for NITF files and ingest them.

    Generator: yields lists of parsed items, one list per file.  Each file
    is moved to _PROCESSED on success or _ERROR on failure via ``move_file``.

    :param provider: ingest provider configuration dict
    :raises ParserError.nitfParserError: when the XML cannot be parsed at all
    :raises ProviderError.ingestError: on any other unexpected failure
    """
    self.provider = provider
    self.path = provider.get('config', {}).get('path', None)
    if not self.path:
        return []
    for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
        try:
            filepath = os.path.join(self.path, filename)
            if os.path.isfile(filepath):
                stat = os.lstat(filepath)
                last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                if self.is_latest_content(last_updated, provider.get('last_updated')):
                    with open(filepath, 'r') as f:
                        item = self.parser.parse_message(etree.fromstring(f.read()), provider)
                    self.move_file(self.path, filename, provider=provider, success=True)
                    yield [item]
                else:
                    # Too old to ingest -- archive as processed so the file
                    # is not picked up again on the next run.
                    self.move_file(self.path, filename, provider=provider, success=True)
        except etreeParserError as ex:
            logger.exception("Ingest Type: AAP - File: {0} could not be processed".format(filename))
            self.move_file(self.path, filename, provider=provider, success=False)
            raise ParserError.nitfParserError(ex, provider)
        except ParserError:
            # FIX: this branch previously swallowed parser failures silently
            # (the bound exception was never used).  Log it so failures are
            # visible, but keep the best-effort behaviour of continuing with
            # the remaining files.
            logger.exception("Ingest Type: AAP - File: {0} could not be parsed".format(filename))
            self.move_file(self.path, filename, provider=provider, success=False)
        except Exception as ex:
            self.move_file(self.path, filename, provider=provider, success=False)
            raise ProviderError.ingestError(ex, provider)
    push_notification('ingest:update')
def parse_message(self, tree, provider):
    """Build an ingest item dict from a parsed NITF document.

    :param tree: root element of the NITF document
    :param provider: ingest provider config dict, used for error reporting
    :raises ParserError.nitfParserError: on any failure while parsing
    """
    item = {}
    try:
        docdata = tree.find('head/docdata')
        # Everything ingested through this parser is plain text.
        item['type'] = ITEM_CLASS_TEXT
        doc_id = docdata.find('doc-id').get('id-string')
        item['guid'] = doc_id
        item['uri'] = doc_id
        item['urgency'] = docdata.find('urgency').get('ed-urg', '5')
        item['pubstatus'] = docdata.attrib.get('management-status', 'usable')
        item['firstcreated'] = get_norm_datetime(docdata.find('date.issue'))
        item['versioncreated'] = get_norm_datetime(docdata.find('date.issue'))
        item['expiry'] = get_norm_datetime(docdata.find('date.expire'))
        item['subject'] = get_subjects(tree)
        item['body_html'] = get_content(tree)
        item['place'] = get_places(docdata)
        item['keywords'] = get_keywords(docdata)
        ed_msg = docdata.find('ed-msg')
        if ed_msg is not None:
            item['ednote'] = ed_msg.attrib.get('info')
        item['headline'] = tree.find('body/body.head/hedline/hl1').text
        abstract = tree.find('body/body.head/abstract')
        item['abstract'] = '' if abstract is None else abstract.text
        city = tree.find('body/body.head/dateline/location/city')
        item['dateline'] = '' if city is None else city.text
        item['byline'] = get_byline(tree)
        parse_meta(tree, item)
        item.setdefault('word_count', get_word_count(item['body_html']))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
def parse_message(self, tree, provider):
    """Parse one NITF document into an item dict; wrap any failure as a parser error."""
    item = {}
    try:
        docdata = tree.find('head/docdata')
        item['type'] = ITEM_CLASS_TEXT  # plain text is the default class
        item['guid'] = item['uri'] = docdata.find('doc-id').get('id-string')
        item['urgency'] = docdata.find('urgency').get('ed-urg', '5')
        item['pubstatus'] = docdata.attrib.get('management-status', 'usable')
        # The three date fields all come from docdata children.
        for field, source in (('firstcreated', 'date.issue'),
                              ('versioncreated', 'date.issue'),
                              ('expiry', 'date.expire')):
            item[field] = get_norm_datetime(docdata.find(source))
        item['subject'] = get_subjects(tree)
        item['body_html'] = get_content(tree)
        item['place'] = get_places(docdata)
        item['keywords'] = get_keywords(docdata)
        if docdata.find('ed-msg') is not None:
            item['ednote'] = docdata.find('ed-msg').attrib.get('info')
        item['headline'] = tree.find('body/body.head/hedline/hl1').text
        # Optional body.head fields default to an empty string when absent.
        for field, xpath in (('abstract', 'body/body.head/abstract'),
                             ('dateline', 'body/body.head/dateline/location/city')):
            elem = tree.find(xpath)
            item[field] = elem.text if elem is not None else ''
        item['byline'] = get_byline(tree)
        parse_meta(tree, item)
        item.setdefault('word_count', get_word_count(item['body_html']))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
def parse_message(self, tree, provider):
    """Parse a NITF document into an ingest item dict.

    :param tree: NITF root element
    :param provider: ingest provider dict, used only for error reporting
    :raises ParserError.nitfParserError: if anything goes wrong while parsing
    """
    item = {"type": ITEM_CLASS_TEXT}  # text is the default item class
    try:
        docdata = tree.find("head/docdata")
        item["guid"] = item["uri"] = docdata.find("doc-id").get("id-string")
        item["urgency"] = docdata.find("urgency").get("ed-urg", "5")
        item["pubstatus"] = docdata.attrib.get("management-status", "usable")
        item["firstcreated"] = get_norm_datetime(docdata.find("date.issue"))
        item["versioncreated"] = get_norm_datetime(docdata.find("date.issue"))
        item["expiry"] = get_norm_datetime(docdata.find("date.expire"))
        item["subject"] = get_subjects(tree)
        item["body_html"] = get_content(tree)
        item["place"] = get_places(docdata)
        item["keywords"] = get_keywords(docdata)
        ed_msg = docdata.find("ed-msg")
        if ed_msg is not None:
            item["ednote"] = ed_msg.attrib.get("info")
        # Headline is mandatory here; the base class trims it to length.
        headline_text = tree.find("body/body.head/hedline/hl1").text
        item["headline"] = super().trim_headline(headline_text)
        abstract = tree.find("body/body.head/abstract")
        item["abstract"] = abstract.text if abstract is not None else ""
        city = tree.find("body/body.head/dateline/location/city")
        item["dateline"] = city.text if city is not None else ""
        item["byline"] = get_byline(tree)
        parse_meta(tree, item)
        item.setdefault("word_count", get_word_count(item["body_html"]))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
def parse(self, xml, provider=None):
    """Parse a NITF ``xml`` element tree into an ingest item dict.

    Optional elements (urgency, expiry, genre, ed-msg, headline, abstract,
    dateline) are only set when present in the document.

    :param xml: NITF root element
    :param provider: ingest provider dict, used only for error reporting
    :raises ParserError.nitfParserError: on any failure while parsing
    """
    item = {}
    try:
        docdata = xml.find('head/docdata')
        # set the default type.
        item[ITEM_TYPE] = CONTENT_TYPE.TEXT
        item['guid'] = item['uri'] = docdata.find('doc-id').get('id-string')
        if docdata.find('urgency') is not None:
            # urgency is stored as an int here; '5' is the fallback value
            item['urgency'] = int(docdata.find('urgency').get('ed-urg', '5'))
        # management-status values are normalised to lower case
        item['pubstatus'] = (docdata.attrib.get('management-status', 'usable')).lower()
        item['firstcreated'] = get_norm_datetime(docdata.find('date.issue'))
        item['versioncreated'] = get_norm_datetime(docdata.find('date.issue'))
        if docdata.find('date.expire') is not None:
            item['expiry'] = get_norm_datetime(docdata.find('date.expire'))
        item['subject'] = get_subjects(xml)
        item['body_html'] = get_content(xml)
        item['place'] = get_places(docdata)
        item['keywords'] = get_keywords(docdata)
        if xml.find('head/tobject/tobject.property') is not None:
            # Map the tobject property onto the 'genre' vocabulary by name.
            genre = xml.find('head/tobject/tobject.property').get('tobject.property.type')
            genre_map = superdesk.get_resource_service('vocabularies').find_one(req=None, _id='genre')
            if genre_map is not None:
                item['genre'] = [x for x in genre_map.get('items', []) if x['name'] == genre]
        if docdata.find('ed-msg') is not None:
            item['ednote'] = docdata.find('ed-msg').attrib.get('info')
        if xml.find('body/body.head/hedline/hl1') is not None:
            item['headline'] = xml.find('body/body.head/hedline/hl1').text
        else:
            # fall back to the document title when there is no hedline
            if xml.find('head/title') is not None:
                item['headline'] = xml.find('head/title').text
        # Prefer the first paragraph of the abstract; fall back to the
        # abstract element's own text when there is no <p> child.
        elem = xml.find('body/body.head/abstract/p')
        item['abstract'] = elem.text if elem is not None else ''
        if elem is None:
            elem = xml.find('body/body.head/abstract')
            item['abstract'] = elem.text if elem is not None else ''
        elem = xml.find('body/body.head/dateline/location/city')
        if elem is not None:
            self.set_dateline(item, city=elem.text)
        item['byline'] = get_byline(xml)
        parse_meta(xml, item)
        # only compute word_count if a mapping/meta step has not set it already
        item.setdefault('word_count', get_word_count(item['body_html']))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
def parse(self, xml, provider=None):
    """Parse a NITF document via the declarative field mapping.

    :param xml: NITF root element
    :param provider: ingest provider dict, used only for error reporting
    :raises ParserError.nitfParserError: on any failure while parsing
    """
    # Text is the default content type for everything this parser ingests.
    item = {ITEM_TYPE: CONTENT_TYPE.TEXT}
    try:
        self.do_mapping(item, xml, SETTINGS_MAPPING_PARAM)
        city_elem = xml.find('body/body.head/dateline/location/city')
        if city_elem is not None:
            self.set_dateline(item, city=city_elem.text)
        item.setdefault('word_count',
                        get_word_count(item['body_html'], no_html=True))
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
    return item
def test_raise_nitfParserError(self):
    """nitfParserError wraps the causing exception and logs exactly one error line."""
    with assert_raises(ParserError) as error_context:
        try:
            original = Exception("Testing nitfParserError")
            raise original
        except Exception:
            raise ParserError.nitfParserError(original, self.provider)
    caught = error_context.exception
    self.assertEqual(caught.code, 1006)
    self.assertEqual(caught.message, "NITF input could not be processed")
    self.assertIsNotNone(caught.system_exception)
    self.assertEqual(caught.system_exception.args[0], "Testing nitfParserError")
    error_lines = self.mock_logger_handler.messages['error']
    self.assertEqual(len(error_lines), 1)
    self.assertEqual(
        error_lines[0],
        "ParserError Error 1006 - NITF input could not be processed: "
        "Testing nitfParserError on channel TestProvider")
def test_raise_nitfParserError(self):
    """Raising nitfParserError carries code 1006, the cause, and one logged error."""
    cause = Exception("Testing nitfParserError")
    with assert_raises(ParserError) as error_context:
        try:
            raise cause
        except Exception:
            raise ParserError.nitfParserError(cause, self.provider)
    wrapped = error_context.exception
    self.assertTrue(wrapped.code == 1006)
    self.assertTrue(wrapped.message == "NITF input could not be processed")
    self.assertIsNotNone(wrapped.system_exception)
    self.assertEqual(wrapped.system_exception.args[0], "Testing nitfParserError")
    logged = self.mock_logger_handler.messages['error']
    self.assertEqual(len(logged), 1)
    expected = ("ParserError Error 1006 - NITF input could not be processed: "
                "Testing nitfParserError on channel TestProvider")
    self.assertEqual(logged[0], expected)
class FileFeedingService(FeedingService):
    """
    Feeding Service class which can read the configured local file system for article(s).
    """

    # Registered feeding-service name.
    NAME = 'file'

    # Error descriptions this service can surface in the UI.
    ERRORS = [
        ParserError.IPTC7901ParserError().get_error_description(),
        ParserError.nitfParserError().get_error_description(),
        ParserError.newsmlOneParserError().get_error_description(),
        ProviderError.ingestError().get_error_description(),
        ParserError.parseFileError().get_error_description()
    ]

    label = 'File feed'

    # UI configuration fields; error codes 3003/3004 match _test below.
    fields = [
        {
            'id': 'path', 'type': 'text', 'label': 'Server Folder',
            'placeholder': 'path to folder', 'required': True,
            'errors': {3003: 'Path not found on server.', 3004: 'Path should be directory.'}
        }
    ]

    def _test(self, provider):
        """Validate that the configured path exists and is a directory."""
        path = provider.get('config', {}).get('path', None)
        if not os.path.exists(path):
            raise IngestFileError.notExistsError()
        if not os.path.isdir(path):
            raise IngestFileError.isNotDirError()

    def _update(self, provider, update):
        """Scan the configured folder and yield batches of parsed items.

        Generator protocol: the consumer sends back a truthy value when saving
        a batch failed; that value decides whether the file is moved to
        _PROCESSED or _ERROR.
        """
        # check if deprecated FILE_INGEST_OLD_CONTENT_MINUTES setting is still used
        if "FILE_INGEST_OLD_CONTENT_MINUTES" in app.config:
            deprecated_cont_min = app.config["FILE_INGEST_OLD_CONTENT_MINUTES"]
            cont_min = app.config[OLD_CONTENT_MINUTES]
            if deprecated_cont_min != cont_min:
                logger.warning(
                    "'FILE_INGEST_OLD_CONTENT_MINUTES' is deprecated, please update settings.py to use {new_name!r}"
                    .format(new_name=OLD_CONTENT_MINUTES))
                # the deprecated value wins, for backwards compatibility
                app.config[OLD_CONTENT_MINUTES] = deprecated_cont_min
        self.provider = provider
        self.path = provider.get('config', {}).get('path', None)
        if not self.path:
            logger.warn('File Feeding Service {} is configured without path. '
                        'Please check the configuration'.format(provider['name']))
            return []
        registered_parser = self.get_feed_parser(provider)
        for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
            try:
                last_updated = None
                file_path = os.path.join(self.path, filename)
                if os.path.isfile(file_path):
                    last_updated = self.get_last_updated(file_path)
                    if self.is_latest_content(last_updated, provider.get('last_updated')):
                        # XML parsers get the parsed root; others get the path.
                        if isinstance(registered_parser, XMLFeedParser):
                            with open(file_path, 'rb') as f:
                                xml = etree.parse(f)
                                parser = self.get_feed_parser(provider, xml.getroot())
                                item = parser.parse(xml.getroot(), provider)
                        else:
                            parser = self.get_feed_parser(provider, file_path)
                            item = parser.parse(file_path, provider)
                        self.after_extracting(item, provider)
                        # The consumer sends back whether saving failed.
                        if isinstance(item, list):
                            failed = yield item
                        else:
                            failed = yield [item]
                        self.move_file(self.path, filename, provider=provider, success=not failed)
                    else:
                        # stale file: move to _ERROR so it is not re-scanned
                        self.move_file(self.path, filename, provider=provider, success=False)
            except Exception as ex:
                # Only give up on the file if it is too old to retry later.
                if last_updated and self.is_old_content(last_updated):
                    self.move_file(self.path, filename, provider=provider, success=False)
                raise ParserError.parseFileError('{}-{}'.format(provider['name'], self.NAME),
                                                filename, ex, provider)
        push_notification('ingest:update')

    def after_extracting(self, article, provider):
        """Sub-classes should override this method if something needs to be done to the given article.

        For example, if the article comes from DPA provider the system needs to derive dateline
        from the properties in the article.

        Invoked after parser parses the article received from the provider.

        :param article: dict having properties that can be saved into ingest collection
        :type article: dict
        :param provider: dict - Ingest provider details to which the current directory has been configured
        :type provider: dict :py:class: `superdesk.io.ingest_provider_model.IngestProviderResource`
        """
        pass

    def move_file(self, file_path, filename, provider, success=True):
        """Move the files from the current directory to the _Processed if successful, else _Error if unsuccessful.

        Creates _Processed and _Error directories within current directory if they don't exist.

        :param file_path: str - current directory location
        :param filename: str - file name in the current directory to move
        :param provider: dict - Ingest provider details to which the current directory has been configured
        :param success: bool - default value is True. When True moves to _Processed directory else _Error directory.
        :raises IngestFileError.folderCreateError() if creation of _Processed or _Error directories fails
        :raises IngestFileError.fileMoveError() if failed to move the file pointed by filename
        """
        try:
            if not os.path.exists(os.path.join(file_path, "_PROCESSED/")):
                os.makedirs(os.path.join(file_path, "_PROCESSED/"))
            if not os.path.exists(os.path.join(file_path, "_ERROR/")):
                os.makedirs(os.path.join(file_path, "_ERROR/"))
        except Exception as ex:
            raise IngestFileError.folderCreateError(ex, provider)
        try:
            # copy2 keeps file metadata; the source is removed in finally
            # even when the copy fails.
            if success:
                shutil.copy2(os.path.join(file_path, filename),
                             os.path.join(file_path, "_PROCESSED/"))
            else:
                shutil.copy2(os.path.join(file_path, filename),
                             os.path.join(file_path, "_ERROR/"))
        except Exception as ex:
            raise IngestFileError.fileMoveError(ex, provider)
        finally:
            os.remove(os.path.join(file_path, filename))

    def get_last_updated(self, file_path):
        """Get last updated time for file.

        Using both mtime and ctime timestamps not to miss old files being copied around
        and recent files after changes done in place.
        """
        stat = os.lstat(file_path)
        timestamp = max(stat.st_mtime, stat.st_ctime)
        return datetime.fromtimestamp(timestamp, tz=utc)
import os
import logging
from datetime import datetime
from .nitf import NITFParser
from superdesk.io.file_ingest_service import FileIngestService
from superdesk.utc import utc, timezone
from superdesk.notification import push_notification
from superdesk.io import register_provider
from ..etree import etree, ParseError as etreeParserError
from superdesk.utils import get_sorted_files, FileSortAttributes
from superdesk.errors import ParserError, ProviderError

logger = logging.getLogger(__name__)

# Registered provider name for this ingest service.
PROVIDER = 'aap'

# Error descriptions this provider can report.
errors = [ParserError.nitfParserError().get_error_description(),
          ProviderError.ingestError().get_error_description(),
          ParserError.parseFileError().get_error_description()]


class AAPIngestService(FileIngestService):
    """AAP Ingest Service"""

    def __init__(self):
        # AAP content timestamps originate in Sydney local time;
        # presumably used by date normalisation elsewhere -- TODO confirm.
        self.tz = timezone('Australia/Sydney')
        self.parser = NITFParser()

    def prepare_href(self, href):
        # hrefs are passed through unchanged for this provider
        return href

    def _update(self, provider):
class EventFileFeedingService(FileFeedingService):
    """
    Feeding Service class which can read the configured local file system for article(s).
    """

    # Registered feeding-service name.
    NAME = 'event_file'

    # Error descriptions this service can surface in the UI.
    ERRORS = [
        ParserError.IPTC7901ParserError().get_error_description(),
        ParserError.nitfParserError().get_error_description(),
        ParserError.newsmlOneParserError().get_error_description(),
        ProviderError.ingestError().get_error_description(),
        ParserError.parseFileError().get_error_description()
    ]

    label = 'Event file feed'

    """
    Defines the collection service to be used with this ingest feeding service.
    """
    service = 'events'

    # UI configuration fields; error codes match FileFeedingService._test.
    fields = [{
        'id': 'path', 'type': 'text', 'label': 'Event File Server Folder',
        'placeholder': 'path to folder', 'required': True,
        'errors': {
            3003: 'Path not found on server.',
            3004: 'Path should be directory.'
        }
    }]

    def _update(self, provider, update):
        """Scan the configured folder for event files and yield parsed items.

        Generator: yields lists of items; each successfully handled file is
        moved to _PROCESSED, and files that are too old to retry go to _ERROR.

        :param provider: ingest provider configuration dict
        :param update: provider update dict (unused here)
        :raises ParserError.parseFileError: when a file cannot be processed
        """
        self.provider = provider
        self.path = provider.get('config', {}).get('path', None)
        if not self.path:
            logger.warn('File Feeding Service {} is configured without path. '
                        'Please check the configuration'.format(provider['name']))
            return []
        for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
            try:
                last_updated = None
                file_path = os.path.join(self.path, filename)
                if os.path.isfile(file_path):
                    stat = os.lstat(file_path)
                    last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                    if self.is_latest_content(last_updated, provider.get('last_updated')):
                        parser = self.get_feed_parser(provider, file_path)
                        logger.info('Ingesting events with {} parser'.format(
                            parser.__class__.__name__))
                        # FIX: getattr without a default raises AttributeError for
                        # parsers that have no parse_file attribute, which made the
                        # parse() fallback below unreachable; passing None restores it.
                        if getattr(parser, 'parse_file', None):
                            with open(file_path, 'rb') as f:
                                item = parser.parse_file(f, provider)
                        else:
                            item = parser.parse(file_path, provider)
                        self.after_extracting(item, provider)
                        self.move_file(self.path, filename, provider=provider, success=True)
                        if isinstance(item, list):
                            yield item
                        else:
                            yield [item]
                    else:
                        # too old to ingest -- archive as processed
                        self.move_file(self.path, filename, provider=provider, success=True)
            except Exception as ex:
                # Only give up on the file if it is too old to retry later.
                if last_updated and self.is_old_content(last_updated):
                    self.move_file(self.path, filename, provider=provider, success=False)
                raise ParserError.parseFileError(
                    '{}-{}'.format(provider['name'], self.NAME), filename, ex, provider)
        push_notification('ingest:update')
class FileFeedingService(FeedingService):
    """
    Feeding Service class which can read the configured local file system for article(s).
    """

    # Registered feeding-service name.
    NAME = 'file'

    # Error descriptions this service can surface in the UI.
    ERRORS = [
        ParserError.IPTC7901ParserError().get_error_description(),
        ParserError.nitfParserError().get_error_description(),
        ParserError.newsmlOneParserError().get_error_description(),
        ProviderError.ingestError().get_error_description(),
        ParserError.parseFileError().get_error_description()
    ]

    label = 'File Feed'

    # UI configuration fields; error codes 3003/3004 match _test below.
    fields = [{
        'id': 'path', 'type': 'text', 'label': 'Server Folder',
        'placeholder': 'path to folder', 'required': True,
        'errors': {
            3003: 'Path not found on server.',
            3004: 'Path should be directory.'
        }
    }]

    def _test(self, provider):
        """Validate that the configured path exists and is a directory."""
        path = provider.get('config', {}).get('path', None)
        if not os.path.exists(path):
            raise IngestFileError.notExistsError()
        if not os.path.isdir(path):
            raise IngestFileError.isNotDirError()

    def _update(self, provider, update):
        """Scan the configured folder and yield lists of parsed items.

        Each ingested (or too-old) file is moved to _PROCESSED; files that
        fail and are old enough not to be retried go to _ERROR.
        """
        self.provider = provider
        self.path = provider.get('config', {}).get('path', None)
        if not self.path:
            logger.warn('File Feeding Service {} is configured without path. '
                        'Please check the configuration'.format(provider['name']))
            return []
        registered_parser = self.get_feed_parser(provider)
        for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
            try:
                last_updated = None
                file_path = os.path.join(self.path, filename)
                if os.path.isfile(file_path):
                    stat = os.lstat(file_path)
                    last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                    if self.is_latest_content(last_updated, provider.get('last_updated')):
                        # XML parsers get the parsed root; others get the path.
                        if isinstance(registered_parser, XMLFeedParser):
                            with open(file_path, 'rb') as f:
                                xml = etree.parse(f)
                                parser = self.get_feed_parser(
                                    provider, xml.getroot())
                                item = parser.parse(xml.getroot(), provider)
                        else:
                            parser = self.get_feed_parser(provider, file_path)
                            item = parser.parse(file_path, provider)
                        self.after_extracting(item, provider)
                        self.move_file(self.path, filename, provider=provider, success=True)
                        if isinstance(item, list):
                            yield item
                        else:
                            yield [item]
                    else:
                        # too old to ingest -- archive as processed
                        self.move_file(self.path, filename, provider=provider, success=True)
            except Exception as ex:
                # Only give up on the file if it is too old to retry later.
                if last_updated and self.is_old_content(last_updated):
                    self.move_file(self.path, filename, provider=provider, success=False)
                raise ParserError.parseFileError(
                    '{}-{}'.format(provider['name'], self.NAME), filename, ex, provider)
        push_notification('ingest:update')

    def after_extracting(self, article, provider):
        """Sub-classes should override this method if something needs to be done to the given article.

        For example, if the article comes from DPA provider the system needs to derive dateline
        from the properties in the article.

        Invoked after parser parses the article received from the provider.

        :param article: dict having properties that can be saved into ingest collection
        :type article: dict
        :param provider: dict - Ingest provider details to which the current directory has been configured
        :type provider: dict :py:class: `superdesk.io.ingest_provider_model.IngestProviderResource`
        """
        pass

    def move_file(self, file_path, filename, provider, success=True):
        """Move the files from the current directory to the _Processed if successful, else _Error if unsuccessful.

        Creates _Processed and _Error directories within current directory if they don't exist.

        :param file_path: str - current directory location
        :param filename: str - file name in the current directory to move
        :param provider: dict - Ingest provider details to which the current directory has been configured
        :param success: bool - default value is True. When True moves to _Processed directory else _Error directory.
        :raises IngestFileError.folderCreateError() if creation of _Processed or _Error directories fails
        :raises IngestFileError.fileMoveError() if failed to move the file pointed by filename
        """
        try:
            if not os.path.exists(os.path.join(file_path, "_PROCESSED/")):
                os.makedirs(os.path.join(file_path, "_PROCESSED/"))
            if not os.path.exists(os.path.join(file_path, "_ERROR/")):
                os.makedirs(os.path.join(file_path, "_ERROR/"))
        except Exception as ex:
            raise IngestFileError.folderCreateError(ex, provider)
        try:
            # copy2 keeps file metadata; the source is removed in finally
            # even when the copy fails.
            if success:
                shutil.copy2(os.path.join(file_path, filename),
                             os.path.join(file_path, "_PROCESSED/"))
            else:
                shutil.copy2(os.path.join(file_path, filename),
                             os.path.join(file_path, "_ERROR/"))
        except Exception as ex:
            raise IngestFileError.fileMoveError(ex, provider)
        finally:
            os.remove(os.path.join(file_path, filename))

    def is_latest_content(self, last_updated, provider_last_updated=None):
        """
        Parse file only if it's not older than provider last update -10m
        """
        if not provider_last_updated:
            # first run for this provider: look back one week
            provider_last_updated = utcnow() - timedelta(days=7)
        return provider_last_updated - timedelta(minutes=10) < last_updated

    def is_old_content(self, last_updated):
        """Test if file is old so it wouldn't probably work in is_latest_content next time.

        Such files can be moved to `_ERROR` folder, it wouldn't be ingested anymore.

        :param last_updated: file last updated datetime
        """
        return last_updated < utcnow() - timedelta(minutes=10)
def parse(self, xml, provider=None):
    """Parse a NITF document using the declarative ``metadata_mapping``.

    Each mapping entry may supply an xpath (with optional filter/attribute/
    default) or a callback; KeyError on the mapping dict is used as the
    "option not present" signal throughout.

    :param xml: NITF root element
    :param provider: ingest provider dict, used only for error reporting
    :raises ParserError.nitfParserError: on any failure while parsing
    """
    if self.metadata_mapping is None:
        self._generate_mapping()
    item = {
        ITEM_TYPE: CONTENT_TYPE.TEXT,  # set the default type.
    }
    try:
        for key, mapping in self.metadata_mapping.items():
            if not mapping:
                # key is ignored
                continue
            try:
                xpath = mapping['xpath']
            except KeyError:
                # no xpath, we must have a callable
                try:
                    value = mapping['callback'](xml)
                except KeyError:
                    # NOTE(review): this also catches a KeyError raised
                    # *inside* the callback, not only a missing 'callback'
                    # key -- both are reported as an invalid mapping.
                    logging.warn(
                        "invalid mapping for key {}, ignoring it".format(
                            key))
                    continue
                except SkipValue:
                    # callback explicitly declined to produce a value
                    continue
            else:
                elem = xml.find(xpath)
                if elem is None:
                    try:
                        value = mapping['default']
                    except KeyError:
                        # if there is not default value we skip the key
                        continue
                else:
                    # we have an element,
                    # do we want a filter, an attribute or the content?
                    try:
                        # filter
                        value = mapping['filter'](elem)
                    except KeyError:
                        try:
                            attribute = mapping['attribute']
                        except KeyError:
                            # content
                            value = ''.join(elem.itertext())
                        else:
                            # attribute
                            value = elem.get(attribute, mapping.get('default_attr'))
            try:
                # filter_value is applied on found value
                value = mapping['filter_value'](value)
            except KeyError:
                pass
            if 'key_hook' in mapping:
                # the hook decides how the value is stored on the item
                mapping['key_hook'](item, value)
            else:
                item[key] = value
        elem = xml.find('body/body.head/dateline/location/city')
        if elem is not None:
            self.set_dateline(item, city=elem.text)
        # only compute word_count if the mapping has not set it already
        item.setdefault('word_count', get_word_count(item['body_html']))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
def parse(self, xml, provider=None):
    """Parse a NITF document using the declarative ``metadata_mapping``.

    Each mapping entry may supply an xpath (with optional filter/attribute/
    default) or a callback; KeyError on the mapping dict is used as the
    "option not present" signal throughout.

    :param xml: NITF root element
    :param provider: ingest provider dict, used only for error reporting
    :raises ParserError.nitfParserError: on any failure while parsing
    """
    if self.metadata_mapping is None:
        self._generate_mapping()
    item = {ITEM_TYPE: CONTENT_TYPE.TEXT}  # set the default type.
    try:
        for key, mapping in self.metadata_mapping.items():
            if not mapping:
                # key is ignored
                continue
            try:
                xpath = mapping["xpath"]
            except KeyError:
                # no xpath, we must have a callable
                try:
                    value = mapping["callback"](xml)
                except KeyError:
                    # NOTE(review): this also catches a KeyError raised
                    # *inside* the callback, not only a missing "callback"
                    # key -- both are reported as an invalid mapping.
                    logging.warn("invalid mapping for key {}, ignoring it".format(key))
                    continue
                except SkipValue:
                    # callback explicitly declined to produce a value
                    continue
            else:
                elem = xml.find(xpath)
                if elem is None:
                    try:
                        value = mapping["default"]
                    except KeyError:
                        # if there is not default value we skip the key
                        continue
                else:
                    # we have an element,
                    # do we want a filter, an attribute or the content?
                    try:
                        # filter
                        value = mapping["filter"](elem)
                    except KeyError:
                        try:
                            attribute = mapping["attribute"]
                        except KeyError:
                            # content
                            value = "".join(elem.itertext())
                        else:
                            # attribute
                            value = elem.get(attribute, mapping.get("default_attr"))
            try:
                # filter_value is applied on found value
                value = mapping["filter_value"](value)
            except KeyError:
                pass
            if "key_hook" in mapping:
                # the hook decides how the value is stored on the item
                mapping["key_hook"](item, value)
            else:
                item[key] = value
        elem = xml.find("body/body.head/dateline/location/city")
        if elem is not None:
            self.set_dateline(item, city=elem.text)
        # only compute word_count if the mapping has not set it already
        item.setdefault("word_count", get_word_count(item["body_html"]))
        return item
    except Exception as ex:
        raise ParserError.nitfParserError(ex, provider)
class EventFileFeedingService(FileFeedingService):
    """
    Feeding Service class which can read the configured local file system for article(s).
    """

    # Registered feeding-service name.
    NAME = 'event_file'

    # Error descriptions this service can surface in the UI.
    ERRORS = [
        ParserError.IPTC7901ParserError().get_error_description(),
        ParserError.nitfParserError().get_error_description(),
        ParserError.newsmlOneParserError().get_error_description(),
        ProviderError.ingestError().get_error_description(),
        ParserError.parseFileError().get_error_description()
    ]

    label = 'Event File Feed'

    """
    Defines the collection service to be used with this ingest feeding service.
    """
    service = 'events'

    def _update(self, provider, update):
        """Scan the configured folder for event files and yield parsed items.

        Chooses the ingestion strategy by registered parser type: XML events,
        iCalendar events, or a generic path-based parser as fallback.
        """
        self.provider = provider
        self.path = provider.get('config', {}).get('path', None)
        if not self.path:
            logger.warn(
                'File Feeding Service {} is configured without path. Please check the configuration'
                .format(provider['name']))
            return []
        registered_parser = self.get_feed_parser(provider)
        for filename in get_sorted_files(self.path, sort_by=FileSortAttributes.created):
            try:
                last_updated = None
                file_path = os.path.join(self.path, filename)
                if os.path.isfile(file_path):
                    stat = os.lstat(file_path)
                    last_updated = datetime.fromtimestamp(stat.st_mtime, tz=utc)
                    if self.is_latest_content(last_updated, provider.get('last_updated')):
                        if isinstance(registered_parser, NTBEventXMLFeedParser):
                            logger.info('Ingesting xml events')
                            with open(file_path, 'rb') as f:
                                xml = ElementTree.parse(f)
                                parser = self.get_feed_parser(
                                    provider, xml.getroot())
                                item = parser.parse(xml.getroot(), provider)
                        elif isinstance(registered_parser, IcsTwoFeedParser):
                            logger.info('Ingesting ics events')
                            with open(file_path, 'rb') as f:
                                cal = Calendar.from_ical(f.read())
                                parser = self.get_feed_parser(provider, cal)
                                item = parser.parse(cal, provider)
                        else:
                            logger.info('Ingesting events with unknown parser')
                            parser = self.get_feed_parser(provider, file_path)
                            item = parser.parse(file_path, provider)
                        self.after_extracting(item, provider)
                        self.move_file(self.path, filename, provider=provider, success=True)
                        if isinstance(item, list):
                            yield item
                        else:
                            yield [item]
                    else:
                        # too old to ingest -- archive as processed
                        self.move_file(self.path, filename, provider=provider, success=True)
            except Exception as ex:
                # Only give up on the file if it is too old to retry later.
                if last_updated and self.is_old_content(last_updated):
                    self.move_file(self.path, filename, provider=provider, success=False)
                raise ParserError.parseFileError(
                    '{}-{}'.format(provider['name'], self.NAME), filename, ex, provider)
        push_notification('ingest:update')