def on_push_button_start_released(self):
    """Persist a new account from the wizard fields and open the main window.

    Reads the server URL and the credentials from the form widgets, builds
    an Account with a generated id and the configured default folder names,
    saves it through AccountManager, then closes this wizard and shows the
    main window.
    """
    # Freeze both buttons while the account is being created.
    self.push_button_test.setEnabled(False)
    self.push_button_start.setEnabled(False)

    url = parse_url(self.line_edit_server_address.text())

    account = Account()
    account.assign_autogenerated_id()
    account.description = ''
    # The generated id doubles as the per-account directory name.
    account.account_root_directory = account.id
    account.oxnote_home_folder = Configuration().get_setting(
        'drive_client',
        'api.defaults.oxnote_home_folder.name',
        default='.oxnote')
    account.application_data_folder = Configuration().get_setting(
        'drive_client',
        'api.defaults.application_data_folder.name',
        default='.oxnote')

    # Normalize the parsed URL parts; anything unusable falls back to a
    # sane default (https scheme, '/ajax/' request uri).
    account.url_scheme = url.scheme if url.scheme in ('http', 'https') else 'https'
    account.url_host = url.host or None
    account.url_port = url.port or None
    if url.request_uri and url.request_uri != '/':
        account.url_uri = url.request_uri
    else:
        account.url_uri = '/ajax/'

    account.username = self.line_edit_username.text()
    account.password = self.line_edit_password.text()
    account.context_id = ''
    account.user_id = ''
    account.enabled = True
    account.drive_quota = -1

    AccountManager().save_account_configuration(account)

    # Hand over to the main window.
    self.close()
    self._oxnote_main_window_widget = MainWindow()
    self._oxnote_main_window_widget.show()
class PiHome(App):
    """Kivy application entry point for PiHome.

    Window geometry is read from ``base.ini`` ('window' section) with an
    800x480 fallback.
    """

    def __init__(self, **kwargs):
        super(PiHome, self).__init__(**kwargs)
        self.base_config = Configuration('base.ini')
        self.height = self.base_config.get_int('window', 'height', 480)
        self.width = self.base_config.get_int('window', 'width', 800)

    def setup(self):
        # Apply the configured window geometry before building the UI.
        Window.size = (self.width, self.height)

    # the root widget
    def build(self):
        self.setup()

        # Quit button; its label comes from config with 'quit' as fallback.
        button = Button(text=self.base_config.get('test', 'phrase', 'quit'))
        button.bind(on_press=lambda _: PiHome.get_running_app().stop())

        reveal = Reveal()
        reveal.add_top_widget(Label(text="PiHome"))
        reveal.add_bottom_widget(button)

        reveal2 = Reveal()
        reveal2.add_top_widget(Label(text="Another one"))
        reveal2.add_bottom_widget(Label(text="bottom"))

        # NOTE: the unused local ``reveal3 = Reveal()`` was removed; the
        # fourth grid cell gets its own fresh (empty) Reveal below.
        layout = GridLayout(rows=4)
        layout.add_widget(Button(text="test"))
        layout.add_widget(reveal)
        layout.add_widget(reveal2)
        layout.add_widget(Reveal())
        return layout
def bootstrap_account_directories(account):
    """Create the on-disk directory tree for a single account.

    Ensures ``<base>/<accounts dir>/<account root>``, its oxnote home folder
    and its application data folder all exist, creating any that are missing.

    :param account: account object providing ``account_root_directory``,
        ``oxnote_home_folder`` and ``application_data_folder`` attributes
    """
    from util.configuration import Configuration

    accounts_directory = os.path.join(
        Environment.get_base_path(),
        Configuration().get_setting('oxnote',
                                    'application.directories.accounts',
                                    default='.oxnote/accounts'))

    account_root = os.path.join(accounts_directory,
                                account.account_root_directory)
    directories = [
        account_root,
        os.path.join(account_root, account.oxnote_home_folder),
        os.path.join(account_root, account.application_data_folder),
    ]

    # The paths above are already absolute; the original code re-joined each
    # of them with the base path (three times per directory), which is
    # redundant and fragile on Windows path semantics.
    for directory in directories:
        if not os.path.isdir(directory):
            logger.debug(
                'Bootstrapping account directory: {}'.format(directory))
            os.makedirs(directory)
class MongoParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Checks parsing of MongoDB log lines."""

    # Event creators configured for the Europe/Amsterdam timezone; the base
    # class uses them to parse the messages fed to assert_parsing().
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_mongo(self):
        """A mongo.log line splits into timestamp, level, event type, thread
        and the remaining free-text message."""
        self.assert_parsing(
            {
                "source": "/penthera/logs/mongodb/mongo.log",
                "message":
                "2018-04-11T07:49:19.072+0000 I NETWORK [conn1564572] end connection 172.16.145.9:54914 (1141 connections now open)"
            },
            {
                # The raw timestamp carries an explicit +0000 offset, hence
                # the expected value is pinned to UTC.
                "@timestamp": datetime(
                    2018,
                    4,
                    11,
                    7,
                    49,
                    19,
                    72000,
                ).replace(tzinfo=pytz.utc),
                "level": "I",
                "event_type": "NETWORK",
                "thread": "conn1564572",
                "message":
                "end connection 172.16.145.9:54914 (1141 connections now open)",
            })
def get_account_root_directory_path(self) -> "str | None":
    """Return the absolute path of this account's root directory.

    The path is ``<base path>/<accounts directory setting>/<account root>``.

    :return: the joined path, or ``None`` when either the account root
        directory or the oxnote home folder is not set.
    """
    # NOTE(review): the original annotation was ``(str, None)`` — at def
    # time that evaluates to a tuple object, not a type, and does not
    # express "str or None"; fixed to a proper optional annotation.
    if not (self._account_root_directory and self._oxnote_home_folder):
        return None
    return os.path.join(
        Environment.get_base_path(),
        Configuration().get_setting('oxnote',
                                    'application.directories.accounts',
                                    default='.oxnote/accounts'),
        self._account_root_directory)
def insertFromMimeData(self, source: QMimeData,
                       disable_richtext: bool = False):
    '''
    Paste handler: image and image-URL clipboard content is embedded as an
    inline base64 ``data:`` image at the cursor; everything else falls back
    to the default rich/plain-text handling.

    ..todo: Add support for embedded content when inserting html mime from clipboard
    '''
    if source.hasImage():
        # Raw image data: round-trip through a temporary PNG in the
        # configured temp directory, then embed it as a data URI.
        temporary_file = os.path.join(
            Environment.get_base_path(),
            Configuration().get_setting(
                'oxnote',
                'application.directories.temporary',
                default='.oxnote/tmp'),
            '{}.png'.format((str(uuid.uuid4()))))
        source.imageData().save(temporary_file)
        with open(temporary_file, 'rb') as f:
            encoded = base64.b64encode(f.read())
        self.textCursor().insertImage('data:image/png;base64,{}'.format(
            encoded.decode("utf-8")))
        # Best-effort cleanup of the temporary file.
        if os.path.isfile(temporary_file):
            os.remove(temporary_file)
    elif source.hasUrls():
        for url in source.urls():
            # Unsupported extension: fall back to default paste handling.
            # NOTE(review): this calls the default handler once per
            # unsupported URL — presumably intended once; verify.
            if pathlib.Path(url.fileName()).suffix.lower(
            )[1:] not in self.supported_image_formats:
                super().insertFromMimeData(source)
                continue
            file_extension = pathlib.Path(
                url.fileName()).suffix.lower()[1:]
            if url.isLocalFile():
                # Silently skip dangling local paths.
                if not os.path.isfile(url.toLocalFile()):
                    continue
                # Local image file: embed its bytes directly.
                with open(url.toLocalFile(), 'rb') as f:
                    self.textCursor().insertImage(
                        'data:image/png;base64,{}'.format(
                            base64.b64encode(f.read()).decode("utf-8")))
            else:
                # Remote image: download and embed only on HTTP 200.
                response = requests.get(url.toString(), stream=True)
                if response.status_code == 200:
                    self.textCursor().insertImage(
                        'data:image/{};base64,{}'.format(
                            file_extension,
                            base64.b64encode(
                                response.content).decode("utf-8")))
    elif source.hasHtml() and disable_richtext:
        # Rich text disabled: keep only the plain-text representation.
        self.textCursor().insertText(source.text())
    else:
        super().insertFromMimeData(source)
def bootstrap_application_directories():
    """Create the application-level working directories if missing.

    Ensures the workspace, accounts and temporary directories (configured
    under ``application.directories.*`` with ``.oxnote*`` defaults) exist
    below the application base path.
    """
    from util.configuration import Configuration

    directories = [
        Configuration().get_setting('oxnote',
                                    'application.directories.workspace',
                                    default='.oxnote'),
        Configuration().get_setting('oxnote',
                                    'application.directories.accounts',
                                    default='.oxnote/accounts'),
        Configuration().get_setting('oxnote',
                                    'application.directories.temporary',
                                    default='.oxnote/tmp')
    ]

    for directory in directories:
        # Resolve the absolute target once instead of re-joining it for the
        # check, the log line and the makedirs call.
        target = os.path.join(Environment.get_base_path(), directory)
        if not os.path.isdir(target):
            logger.debug(
                'Bootstrapping application directory: {}'.format(target))
            os.makedirs(target)
def main():
    """Application entry point: set up logging, then start either the
    first-run wizard or the main window."""
    # NOTE(review): logged before the call below actually initiates the
    # logging environment — message order looks intentional but confirm.
    logger.debug('Initiated logging environment')
    Environment.initiate_logging_environment(Configuration().get_setting(
        'oxnote', 'extended_logging.requests_debug_level', default=0))
    logger.debug('Starting OXNote')
    # Keep a module-level reference so the application object outlives
    # this function.
    global application
    application = Application()
    # No configured accounts means first run: show the setup wizard,
    # otherwise go straight to the main window.
    if not AccountManager().list_accounts():
        application.start_wizard()
    else:
        application.start_main_window()
def __load_note_to_list(self,
                        account_id: str,
                        file: str,
                        refresh_list: bool = True):
    """(Re)load a single note file into the note list widget.

    Looks up an existing list entry for ``file``; when none exists (or the
    found entry has no text yet) a new item plus item widget is created and
    registered. The item widget's preview (title, text excerpt, icon) is
    refreshed either way.

    :param account_id: id of the account the note belongs to
    :param file: note filename used as list item text / lookup key
    :param refresh_list: re-sort/refresh the whole list afterwards
    """
    logger.debug(
        '(Re)loading note from account {} to the note list: {}'.format(
            account_id, file))

    list_item = None
    list_item_widget = None

    # Renamed from ``list`` to stop shadowing the builtin.
    matches = self.list_widget_notelist.findItems(file, Qt.MatchExactly)
    if len(matches) == 1:
        list_item = matches[0]
        list_item_widget = self.list_widget_notelist.itemWidget(list_item)
    elif len(matches) > 1:
        # Filenames are expected to be unique; bail out on ambiguity.
        logger.warning(
            'Found multiple widgets for filename \'{}\''.format(file))
        return

    # Original condition repeated the same emptiness test twice
    # (``not x or len(x) <= 0``); a single truthiness check is equivalent.
    if not list_item or not list_item.text():
        list_item_widget = NotesListWidgetItemWidget(
            filename=file, checksum=Util.calculate_file_checksum(file))
        list_item = SortableListWidgetItem(
            Note(account_id=account_id, file=file))
        list_item.setSizeHint(
            QSize(self.list_widget_notelist.width(),
                  list_item_widget.height()))
        list_item.setFont(QFont())
        self.list_widget_notelist.addItem(list_item)
        self.list_widget_notelist.setItemWidget(list_item,
                                                list_item_widget)

    list_item_widget.set_title(list_item.note.title)
    list_item_widget.set_preview_text(
        list_item.note.document.toPlainText()[:Configuration().get_setting(
            'oxnote',
            'application.settings.preview_content_length_maximum',
            default=200)])
    list_item_widget.set_icon(list_item.note.note_list_preview_image)
    list_item.setText(file)

    if refresh_list:
        self.__refresh_notes_list()
class TraxisCassandraMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Checks parsing of Traxis Cassandra err/gen log topics."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_traxis_cassandra(self):
        """Timestamp and level are extracted and the service/thread prefix
        is stripped from the message; each tuple is (raw, expected)."""
        test_cases = [
            ({
                "topic":
                "vagrant_in_eosdtv_be_prd_heapp_traxis_cassandra_log_err_v1",
                "message":
                "ServiceHost: 2016-11-08 08:05:05,490 ERROR [92] MaintenanceController - Eventis.Cassandra.Service.CassandraServiceException+HostGeneralException: Error from nodetool: Keyspace [Traxis] does not exist."
            }, {
                "@timestamp":
                datetime(2016, 11, 8, 8, 5, 5,
                         490000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "message":
                "MaintenanceController - Eventis.Cassandra.Service.CassandraServiceException+HostGeneralException: Error from nodetool: Keyspace [Traxis] does not exist."
            }),
            ({
                "topic":
                "vagrant_in_eosdtv_be_prd_heapp_traxis_cassandra_log_gen_v1",
                "message":
                "Cassandra: Eventis.Cassandra.Service.CassandraServiceException+CassandraWarningException: WARN [main] 2017-01-20 08:11:39,729 No host ID found, created 3e901daf-d150-4e40-ba33-bc09b9c04158 (Note: This should happen exactly once per node)."
            }, {
                "@timestamp":
                datetime(2017, 1, 20, 8, 11, 39,
                         729000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "message":
                "No host ID found, created 3e901daf-d150-4e40-ba33-bc09b9c04158 (Note: This should happen exactly once per node)."
            })
        ]

        for test_message, parsed_message in test_cases:
            self.assert_parsing(test_message, parsed_message)
class TraxisBackendParsingTestCase(BaseMultipleMessageParsingTestCase):
    """
    A unit test to check a work of event creators for the Traxis Backend log parsing job.
    """

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_traxis_backend(self):
        """Each tuple is (raw kafka message, expected parsed fields); extra
        fields (activity, task, request_id, duration_ms) are asserted where
        the parser derives them from the message body."""
        test_cases = [
            ({
                "source": "TraxisService.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-10-03 15:47:00,109 DEBUG [149] WcfHelper - "
                           "Wcf call 'GetPromotionRuleLogData' took '10' ms"
            }, {
                "@timestamp": datetime(2017, 10, 3, 15, 47, 0, 109000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "message": "WcfHelper - Wcf call 'GetPromotionRuleLogData' took '10' ms"
            }),
            ({
                "source": "TraxisService.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-07-10 00:37:23,829 INFO [HTTP worker thread 18] OnlineTvaIngest - "
                           "[172.30.182.17:55347] [RequestId = 5a903fc1-02a5-4424-8c16-4078f83df6c4] "
                           "Tva notification received"
            }, {
                "@timestamp": datetime(2017, 7, 10, 0, 37, 23, 829000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "OnlineTvaIngest - [172.30.182.17:55347] [RequestId = 5a903fc1-02a5-4424-8c16-4078f83df6c4] "
                           "Tva notification received",
                "activity": "OnlineTvaIngest",
                "request_id": "5a903fc1-02a5-4424-8c16-4078f83df6c4"
            }),
            ({
                "source": "TraxisService.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-07-10 00:37:26,417 INFO [105] TvaManager - [Task = Notification of type "
                           "'TvaIngestCompleted'] Loading tva version '233.1221' took '314' ms"
            }, {
                "@timestamp": datetime(2017, 7, 10, 0, 37, 26, 417000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "TvaManager - [Task = Notification of type 'TvaIngestCompleted'] "
                           "Loading tva version '233.1221' took '314' ms",
                "activity": "TvaManager",
                "task": "Notification of type 'TvaIngestCompleted'",
                "duration_ms": 314
            }),
            ({
                "source": "TraxisService.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-07-10 00:39:25,522 INFO [160] ParsingContext - [Task = TvaManagementExpirationCheck] "
                           "Tva ingest completed, duration = 1478 ms, new version = 233.1222, entities set = 2, "
                           "deleted = 1 (total = 425429), Event set = 0, deleted = 1 (total = 200840), Channel "
                           "set = 1, deleted = 0 (total = 488), Title set = 1, deleted = 0 (total = 191667)"
            }, {
                "@timestamp": datetime(2017, 7, 10, 0, 39, 25, 522000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "ParsingContext - [Task = TvaManagementExpirationCheck] Tva ingest completed, duration = "
                           "1478 ms, new version = 233.1222, entities set = 2, deleted = 1 (total = 425429), Event "
                           "set = 0, deleted = 1 (total = 200840), Channel set = 1, deleted = 0 (total = 488), "
                           "Title set = 1, deleted = 0 (total = 191667)",
                "activity": "ParsingContext",
                "task": "TvaManagementExpirationCheck",
                "duration_ms": 1478
            }),
            ({
                "source": "TraxisService.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-07-10 00:40:25,641 INFO [149] ParsingContext - [Task = TvaManagementExpirationCheck] "
                           "Number of write actions queued = 125. Action took 110 ms"
            }, {
                "@timestamp": datetime(2017, 7, 10, 0, 40, 25, 641000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "ParsingContext - [Task = TvaManagementExpirationCheck] "
                           "Number of write actions queued = 125. Action took 110 ms",
                "activity": "ParsingContext",
                "task": "TvaManagementExpirationCheck",
                "duration_ms": 110
            }),
            ({
                "source": "TraxisServiceLogManagement.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-10-03 15:54:30,103 DEBUG [113] Logger`1 - Retrieving '0' rows took '7' ms. "
                           "Pages processed = '2'"
            }, {
                "@timestamp": datetime(2017, 10, 3, 15, 54, 30, 103000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "message": "Logger`1 - Retrieving '0' rows took '7' ms. Pages processed = '2'"
            }),
            ({
                "source": "TraxisServiceDistributedScheduler.log",
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_gen_v1",
                "message": "2017-09-26 16:58:57,079 INFO [61] DistributedScheduler - [Task = DistributedScheduler.Slave] Assignments in database too old '2017-09-26T15:58:33Z'. Waiting until a more recent version is written by the master"
            }, {
                "@timestamp": datetime(2017, 9, 26, 16, 58, 57, 79000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "DistributedScheduler - [Task = DistributedScheduler.Slave] Assignments in database too old '2017-09-26T15:58:33Z'. Waiting until a more recent version is written by the master"
            }),
            ({
                "topic": "vagrant_in_eosdtv_be_prd_heapp_traxis_backend_log_err_v1",
                "message": "2017-09-27 11:35:20,711 ERROR [118] MachineTimeCheck - [Task = Eventis.Traxis.Service.Ntp.MachineTimeCheck] Eventis.Traxis.Service.ServiceException+NetworkTimeCheckError: NetworkTime error: Time difference between this machine and machine '10.95.97.59' is '2848' ms. This exceeds the configured threshold of '2000' ms"
            }, {
                "@timestamp": datetime(2017, 9, 27, 11, 35, 20, 711000).replace(
                    tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "message": "MachineTimeCheck - [Task = Eventis.Traxis.Service.Ntp.MachineTimeCheck] Eventis.Traxis.Service.ServiceException+NetworkTimeCheckError: NetworkTime error: Time difference between this machine and machine '10.95.97.59' is '2848' ms. This exceeds the configured threshold of '2000' ms"
            })
        ]

        for test_message, parsed_message in test_cases:
            self.assert_parsing(test_message, parsed_message)
class F5MessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Checks parsing of F5 load-balancer traffic log lines."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_f5_message_log_parsing(self):
        """Key="value" pairs are split into snake_case fields; selected HTTP
        headers (Host, Content-Length, content-type, X-Forwarded-For, X-dev)
        are additionally lifted out of the RequestHeader blob."""
        self.assert_parsing(
            {
                'message':
                'Hostname="nl-srk03a-fe-vlb-lgcf12.aorta.net",Entity="Traffic",AggrInterval="10",EOCTimestamp="1521457813",RequestStartTimestamp="1521457813",ResponseStartTimestamp="1521457813",AVRProfileName="/Common/PFL_ANALYTICS_HTTP_SP_OBO",ApplicationName="<Unassigned>",VSName="/Common/OBO_ODH_DEU_443",POOLIP="2001:db8:1:0:ef21:0:1b1c:fe02",POOLIPRouteDomain="0",POOLPort="0",URLString="/report",ClientIP="213.46.252.136",ClientIPRouteDomain="0",ClientPort="50634",UserAgentString="",MethodString="POST",ResponseCode="0",GeoCode="NL",ServerLatency="0",RequestSize="5170",ResponseSize="0",RequestHeader="POST /report HTTP/1.1\\r\\nHost: obousage.prod.de.dmdsdp.com\\r\\nContent-Length: 4988\\r\\ncontent-type: application/json\\r\\nX-Forwarded-For: 213.46.252.136\\r\\nX-dev: 3C36E4-EOSSTB-003392565903\\r\\n\\r\\n",RequestHeaderTruncated="0",ResponseHeaderTruncated="0",RequestPayloadTruncated="0",ResponsePayloadTruncated="0",MitigatedByDoSL7="0"'
            },
            {
                # EOCTimestamp 1521457813 rendered in Europe/Amsterdam.
                "@timestamp":
                datetime(2018, 3, 19, 11, 10,
                         13).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "aggr_interval": "10",
                "application_name": "<Unassigned>",
                "avr_profile_name": "/Common/PFL_ANALYTICS_HTTP_SP_OBO",
                "client_ip": "213.46.252.136",
                "client_ip_route_domain": "0",
                "client_port": "50634",
                "content-length": "4988",
                "content-type": "application/json",
                "entity": "Traffic",
                "eoc_timestamp": "1521457813",
                "geo_code": "NL",
                "host": "obousage.prod.de.dmdsdp.com",
                "hostname": "nl-srk03a-fe-vlb-lgcf12.aorta.net",
                "method_string": "POST",
                "mitigated_by_do_sl7": "0",
                "pool_port": "0",
                "poolip": "2001:db8:1:0:ef21:0:1b1c:fe02",
                "poolip_route_domain": "0",
                "request_header":
                'POST /report HTTP/1.1\\r\\nHost: obousage.prod.de.dmdsdp.com\\r\\nContent-Length: 4988\\r\\ncontent-type: application/json\\r\\nX-Forwarded-For: 213.46.252.136\\r\\nX-dev: 3C36E4-EOSSTB-003392565903\\r\\n\\r\\n',
                "request_header_truncated": "0",
                "request_payload_truncated": "0",
                "request_size": "5170",
                "request_start_timestamp": "1521457813",
                "response_code": "0",
                "response_header_truncated": "0",
                "response_payload_truncated": "0",
                "response_size": "0",
                "response_start_timestamp": "1521457813",
                "server_latency": "0",
                "url_string": "/report",
                "vs_name": "/Common/OBO_ODH_DEU_443",
                "x-dev": "3C36E4-EOSSTB-003392565903",
                "x-forwarded-for": "213.46.252.136"
            })
class PosterServerMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Checks parsing of PosterServer application log lines."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_parse_poster_server_log(self):
        """Plain log line: timestamp, level, module and message."""
        self.assert_parsing(
            {
                'source': 'PosterServer.log',
                'message':
                '2017-09-04 12:30:33,740 WARN Config OverlaysDirectory not set'
            },
            {
                '@timestamp':
                datetime(2017, 9, 4, 12, 30, 33,
                         740000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                'level': 'WARN',
                'module': 'Config',
                'message': 'OverlaysDirectory not set'
            })

    def test_parse_poster_server_log_crid(self):
        """A crid:// token embedded in the path is extracted as 'crid'."""
        self.assert_parsing(
            {
                'source': 'PosterServer.log',
                'message':
                '2018-04-03 13:42:01,986 INFO ImageTransformer Queueing the generation of (D:\\PosterServer\\.resized\\OndemandImages\\BE\\PI\\crid~~3A~~2F~~2Ftelenet.be~~2F21a46636-c0e8-4865-a752-94a1ce45eda2\\120x0_Box_96x96dpi_Jpg\\95d0cf239b261388c554b3fae1d65e78.jpg)'
            },
            {
                '@timestamp':
                datetime(2018, 4, 3, 13, 42, 1,
                         986000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                'level': 'INFO',
                'module': 'ImageTransformer',
                'message':
                'Queueing the generation of (D:\\PosterServer\\.resized\\OndemandImages\\BE\\PI\\crid~~3A~~2F~~2Ftelenet.be~~2F21a46636-c0e8-4865-a752-94a1ce45eda2\\120x0_Box_96x96dpi_Jpg\\95d0cf239b261388c554b3fae1d65e78.jpg)',
                'crid':
                'crid~~3A~~2F~~2Ftelenet.be~~2F21a46636-c0e8-4865-a752-94a1ce45eda2'
            })

    def test_parse_poster_server_error_log(self):
        """Error log file uses the same line format."""
        self.assert_parsing(
            {
                'source': 'PosterServer.Error.log',
                'message':
                '2017-09-04 12:30:33,740 ERROR ResizerModule File not found (EventImages/1.jpg)'
            },
            {
                '@timestamp':
                datetime(2017, 9, 4, 12, 30, 33,
                         740000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                'level': 'ERROR',
                'module': 'ResizerModule',
                'message': 'File not found (EventImages/1.jpg)'
            })
class ThinkAnalyticsMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Checks parsing of the various ThinkAnalytics log topics (http access,
    RE/REMON system out, central, thinkenterprise, GC, server, ingest)."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_httpaccess(self):
        """Access-log line: request metadata plus every query parameter is
        lifted into its own field; crid/profile-id get dedicated keys."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkanalytics_httpaccess_log_v1",
                "source": "localhost_access_log",
                "message":
                "[04/Apr/2018:16:47:05 +0200] 10.95.96.119 http-0.0.0.0-8080-5 GET /RE/REController.do?clientType=300&actionId=3&subscriberId=5ec2ed6f-a5c4-4afe-8970-39b9c57fc39c_be#MasterProfile&contentSourceId=1&actionTime=1522853226&method=learnAction&contentItemId=crid://telenet.be/e36c8aef-2934-45bd-ae22-244c8e796d6d&contentItemInstanceId=imi:001000000020B7F0 HTTP/1.1 200 5"
            },
            {
                "@timestamp":
                datetime(2018, 4, 4, 16, 47,
                         5).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "ip": "10.95.96.119",
                "thread": "http-0.0.0.0-8080-5",
                "http_method": "GET",
                "http_version": "HTTP/1.1",
                "response_code": "200",
                "response_time": "5",
                "contentSourceId": "1",
                'contentItemInstanceId': 'imi:001000000020B7F0',
                'contentItemId':
                'crid://telenet.be/e36c8aef-2934-45bd-ae22-244c8e796d6d',
                'crid':
                'crid://telenet.be/e36c8aef-2934-45bd-ae22-244c8e796d6d',
                "clientType": "300",
                "method": "learnAction",
                "traxis-profile-id":
                "5ec2ed6f-a5c4-4afe-8970-39b9c57fc39c_be#MasterProfile",
                "action": "/RE/REController.do",
                'actionId': '3',
                'actionTime': '1522853226'
            })

    def test_httpaccess_ids(self):
        """Repeated query parameters keep the last occurrence (e.g. the
        third contentSourceId / the last searchField)."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkanalytics_httpaccess_log_v1",
                "source": "localhost_access_log",
                "message":
                "[28/Mar/2018:13:49:24 +0200] 127.0.0.1 http-0.0.0.0-8080-34 GET "
                "/RE/REController.do?allowPreviousRecommendations=false&term=Smurfen&intRequestId=ee3b0ec0-55be-445c-878b-4c66bac1320f_be#MasterProfile1522237764600&subscriberId=ee3b0ec0-55be-445c-878b-4c66bac1320f_be#MasterProfile&queryLanguage=nl&searchField=title&searchField=seriestitle&searchField=people&method=search&applyMarketingBias=true&contentSourceId=1&contentSourceId=2&contentSourceId=3&maxResults=10&clientType=335 HTTP/1.1 200 5"
            },
            {
                "@timestamp":
                datetime(2018, 3, 28, 13, 49,
                         24).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "ip": "127.0.0.1",
                "thread": "http-0.0.0.0-8080-34",
                "http_method": "GET",
                "http_version": "HTTP/1.1",
                "response_code": "200",
                "response_time": "5",
                "allowPreviousRecommendations": "false",
                "applyMarketingBias": "true",
                "contentSourceId": "3",
                "clientType": "335",
                "traxis-profile-id":
                "ee3b0ec0-55be-445c-878b-4c66bac1320f_be#MasterProfile",
                "request-id":
                "ee3b0ec0-55be-445c-878b-4c66bac1320f_be#MasterProfile1522237764600",
                "maxResults": "10",
                "method": "search",
                "queryLanguage": "nl",
                "searchField": "people",
                "term": "Smurfen",
                "action": "/RE/REController.do"
            })

    def test_resystemout(self):
        """RE_SystemOut line: level, script reference and message."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkanalytics_resystemout_log_v1",
                "source": "RE_SystemOut.log",
                "message":
                "[29/09/17 13:00:23.944 CEST] WARN - RecommendationServiceController.handleRequestInternal(211) : [DAWN_0107] - Failed to get customer data from Traxis for profile Id: Jef_be_be~~23MasterProfile: [DAWN_0103] - Error calling Traxis.Web for Jef_be_be~~23MasterProfile: HTTP Code: 400, HTTP Message: Bad Request, Traxis Message: Invalid parameter 'ProfileId', value 'Jef_be_be#MasterProfile'"
            },
            {
                "@timestamp":
                datetime(2017, 9, 29, 13, 00, 23,
                         944000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "script":
                "RecommendationServiceController.handleRequestInternal(211)",
                "message":
                "[DAWN_0107] - Failed to get customer data from Traxis for profile Id: Jef_be_be~~23MasterProfile: [DAWN_0103] - Error calling "
                "Traxis.Web for Jef_be_be~~23MasterProfile: HTTP Code: 400, HTTP Message: Bad Request, Traxis Message: Invalid parameter 'ProfileId', value 'Jef_be_be#MasterProfile'"
            })

    def test_remonsystemout(self):
        """REMON_SystemOut line: the bracketed code becomes 'type'."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkanalytics_remonsystemout_log_v1",
                "source": "REMON_SystemOut.log",
                "message":
                "[29/09/17 01:15:00.141 CEST] WARN - LGITopListManager.validateTopLists(113) : [NO_ENTRIES_FOR_EXPECTED_TOP_LIST] - Expected Top List MostPurchased^TVOD_Currents is missing or has no entries."
            },
            {
                "@timestamp":
                datetime(2017, 9, 29, 1, 15, 00,
                         141000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "script": "LGITopListManager.validateTopLists(113)",
                "type": "NO_ENTRIES_FOR_EXPECTED_TOP_LIST",
                "message":
                "Expected Top List MostPurchased^TVOD_Currents is missing or has no entries."
            })

    def test_central(self):
        """Central.log: CSV-style quoted columns map to named fields."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkanalytics_central_log_v1",
                "source": "Central.log",
                "message":
                '''"Thu 05/10/17","02:50:01","","Event Log Stopped","be-l-p-obo00336","","","","Customer"'''
            },
            {
                "@timestamp":
                datetime(2017, 10, 5, 2, 50,
                         1).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "",
                "message": "Event Log Stopped",
                "thread": "be-l-p-obo00336",
                "c0": "",
                "c1": "",
                "c2": "",
                "role": "Customer"
            })

    def test_thinkenterprise(self):
        """thinkenterprise.log: timestamp, level and message."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_thinkenterprise_central_log_v1",
                "source": "thinkenterprise.log",
                "message":
                "2017-09-29 02:50:44,608: INFO - ThinkEnterprise: rmi://be-l-p-obo00335:55969"
            },
            {
                "@timestamp":
                datetime(2017, 9, 29, 2, 50, 44,
                         608000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "ThinkEnterprise: rmi://be-l-p-obo00335:55969"
            })

    def test_gcollector(self):
        """gcollector.log: GC line keeps process uptime plus the GC body."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_gcollector_central_log_v1",
                "source": "gcollector.log",
                "message":
                "2017-09-29T07:03:38.835+0200: 908973.815: [GC [1 "
                "CMS-initial-mark: 997339K(1398144K)] 1032156K(2027264K), 0.0337620 secs] [Times: user=0.03 sys=0.00, real=0.04 secs]"
            },
            {
                "@timestamp":
                datetime(2017, 9, 29, 7, 3, 38,
                         835000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "process_uptime": "908973.815",
                "message":
                "[GC [1 CMS-initial-mark: 997339K(1398144K)] 1032156K(2027264K), 0.0337620 secs] [Times: user=0.03 sys=0.00, real=0.04 secs]"
            })

    def test_server(self):
        """server.log: log4j-style line with class name and thread."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_gcollector_server_log_v1",
                "source": "server.log",
                "message":
                "2017-10-05 15:07:27,281 WARN [com.mchange.v2.resourcepool.BasicResourcePool] (C3P0PooledConnectionPoolManager[identityToken->2vlxe59qgs2ym41at6ny1|2efd4b56, dataSourceName->creRepStatus]-HelperThread-#5) com.mchange.v2.resourcepool.BasicResourcePool$ScatteredAcquireTask@90a251f -- Acquisition Attempt Failed!!! Clearing pending acquires. While trying to acquire a needed new resource, we failed to succeed more than the maximum number of allowed acquisition attempts (30). Last acquisition attempt exception: : java.sql.SQLException: ORA-01017: invalid username/password; logon denied"
            },
            {
                "@timestamp":
                datetime(2017, 10, 5, 15, 7, 27,
                         281000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "class_name": "com.mchange.v2.resourcepool.BasicResourcePool",
                "thread":
                "C3P0PooledConnectionPoolManager[identityToken->2vlxe59qgs2ym41at6ny1|2efd4b56, dataSourceName->creRepStatus]-HelperThread-#5",
                "message":
                "com.mchange.v2.resourcepool.BasicResourcePool$ScatteredAcquireTask@90a251f -- Acquisition Attempt Failed!!! Clearing pending acquires. While trying to acquire a needed new resource, we failed to succeed more than the maximum number of allowed acquisition attempts (30). "
                "Last acquisition attempt exception: : java.sql.SQLException: ORA-01017: invalid username/password; logon denied"
            })

    def test_reingest(self):
        """RE_Ingest.log: a whole multi-line script run collapses into one
        event with start/finish scripts, timestamps and overall duration."""
        self.assert_parsing(
            {
                "topic":
                "vagrant_in_eosdtv_lab5aobo_tst_heapp_gcollector_reingest_log_v1",
                "source": "RE_Ingest.log",
                "message": """-- Start of RE_SocialModel.log --
Started ./runBuildSocialModel.sh Mon Aug 28 15:45:09 CEST 2017
Started /apps/ThinkAnalytics/ModelAnalysis/bin/buildModelAndRefreshMemory.sh Mon Aug 28 15:45:09 CEST 2017
Started /apps/ThinkAnalytics/ModelAnalysis/bin/buildSocialModel.sh Mon Aug 28 15:45:09 CEST 2017
Buildfile: install-run.xml

check_kwiz_libs:

runVODSubscriberPrepareBuildData:
[Java[RunPlanTask]] java.lang.Exception: Failed to run plan:VODSubscriberPrepareBuildData
[Java[RunPlanTask]] \tat com.thinkanalytics.re.ant.RunPlan.execute(RunPlan.java:279)
[Java[RunPlanTask]] \tat com.thinkanalytics.re.ant.RunPlan.main(RunPlan.java:370)
[Java[RunPlanTask]] \tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[Java[RunPlanTask]] \tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
[Java[RunPlanTask]] \tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
[Java[RunPlanTask]] \tat java.lang.reflect.Method.invoke(Method.java:606)
[Java[RunPlanTask]] \tat com.kwiz.kd.util.KwizLoader.main(Unknown Source)

BUILD FAILED
/app/apps/ThinkAnalytics/ModelAnalysis/setup/install-run.xml:20: Failed to run plan (VODSubscriberPrepareBuildData) reason:Java returned: 1

Total time: 12 seconds
[28/08/2017-15:45:23-CEST] ERROR - FAILED to run social model building.
[28/08/2017-15:45:23-CEST] ERROR - FAILED to rebuild the social model
Finished ./runBuildSocialModel.sh Mon Aug 28 15:45:23 CEST 2017"""
            },
            {
                "@timestamp":
                datetime(2017, 8, 28, 15, 45,
                         9).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "started_script": "./runBuildSocialModel.sh",
                "finished_script": "./runBuildSocialModel.sh",
                "finished_time":
                datetime(2017, 8, 28, 15, 45,
                         23).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "duration": 14,
                "message":
                "\nStarted /apps/ThinkAnalytics/ModelAnalysis/bin/buildModelAndRefreshMemory.sh Mon Aug 28 15:45:09 CEST 2017\nStarted /apps/ThinkAnalytics/ModelAnalysis/bin/buildSocialModel.sh Mon Aug 28 15:45:09 CEST 2017\nBuildfile: install-run.xml\n\ncheck_kwiz_libs:\n\nrunVODSubscriberPrepareBuildData:\n[Java[RunPlanTask]] java.lang.Exception: Failed to run plan:VODSubscriberPrepareBuildData\n[Java[RunPlanTask]] \tat com.thinkanalytics.re.ant.RunPlan.execute(RunPlan.java:279)\n[Java[RunPlanTask]] \tat com.thinkanalytics.re.ant.RunPlan.main(RunPlan.java:370)\n[Java[RunPlanTask]] \tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n[Java[RunPlanTask]] \tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\n[Java[RunPlanTask]] \tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n[Java[RunPlanTask]] \tat java.lang.reflect.Method.invoke(Method.java:606)\n[Java[RunPlanTask]] \tat com.kwiz.kd.util.KwizLoader.main(Unknown Source)\n\nBUILD FAILED\n/app/apps/ThinkAnalytics/ModelAnalysis/setup/install-run.xml:20: Failed to run plan (VODSubscriberPrepareBuildData) reason:Java returned: 1\n\nTotal time: 12 seconds\n[28/08/2017-15:45:23-CEST] ERROR - FAILED to run social model building.\n[28/08/2017-15:45:23-CEST] ERROR - FAILED to rebuild the social model\n"
            })
# NOTE(review): Python 2 print statement; this line is the tail of a
# definition that starts outside the visible chunk — original indentation
# unknown, left as-is.
print graph.serialize(format='turtle')


# Example of usage :
# /home/cgueret/Code/CEDAR/DataDump-mini-vt/config.ini VT_1879_01_H1-S0-S1019-h
if __name__ == '__main__':
    # Configure a logger
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.INFO)
    logFormat = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
    ch = logging.StreamHandler()
    ch.setFormatter(logging.Formatter(logFormat))
    root_logger.addHandler(ch)

    # Parse the command line
    parser = argparse.ArgumentParser(
        description='Extract the provenance of an harmonized observation')
    parser.add_argument('configuration', metavar='configuration', type=str,
                        help='The configuration file used for Integrator')
    parser.add_argument('resource', metavar='resource', type=str,
                        help='The resource to track the provenance of')
    args = parser.parse_args()

    # Get the provenance of the resource
    configuration = Configuration(args.configuration)
    data_ns = rdflib.namespace.Namespace(configuration.get_namespace('data'))
    provTracker = ProvenanceTracker(configuration)
    provTracker.track(data_ns[args.resource])
class TraxisFrontEndParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for the Traxis front-end log files.

    Each test feeds one raw log line (TraxisService.log and its companion
    error/scheduler/management logs) through the event creators configured
    for the Europe/Amsterdam timezone and asserts the exact set of fields
    the parser is expected to extract from it.
    """

    event_creators = create_event_creators(
        Configuration(dict={"timezone": {"name": "Europe/Amsterdam"}}))

    def test_traxis_service_log(self):
        """A generic TraxisService.log line yields timestamp/level/thread/component plus the stripped message."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-09-29 11:30:51,656 VERBOSE [184] QueryContext - Normalized query for caching = <Request xmlns=\"urn:eventis:traxisweb:1.0\"><Parameters><Parameter name=\"User-Agent\">recording-service/0.23.0 TentacleClient/5.4.0 Jersey/2.25.1</Parameter><Parameter name=\"language\">en</Parameter></Parameters><ResourcesQuery resourceType=\"event\"><ResourceIds><ResourceId>crid://telenet.be/996d88df-9327-4cfd-84b8-0f61648f42ad,imi:0010000000033E93</ResourceId></ResourceIds><Options><Option type=\"props\">durationinseconds,availabilitystart,availabilityend</Option></Options><SubQueries><SubQuery relationName=\"titles\"><Options><Option type=\"props\">episodename,isadult,name,ordinal,pictures,ratings,minimumage,longsynopsis</Option></Options><SubQueries><SubQuery relationName=\"seriescollection\"><Options><Option type=\"props\">relationordinal,type,name</Option></Options><SubQueries><SubQuery relationName=\"parentseriescollection\"><Options><Option type=\"props\">relationordinal,type,name</Option></Options></SubQuery></SubQueries></SubQuery></SubQueries></SubQuery></SubQueries></ResourcesQuery></Request>"
            },
            {
                "@timestamp": datetime(2017, 9, 29, 11, 30, 51, 656000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "VERBOSE",
                "thread_name": "184",
                "component": "QueryContext",
                "message": "Normalized query for caching = <Request xmlns=\"urn:eventis:traxisweb:1.0\"><Parameters><Parameter name=\"User-Agent\">recording-service/0.23.0 TentacleClient/5.4.0 Jersey/2.25.1</Parameter><Parameter name=\"language\">en</Parameter></Parameters><ResourcesQuery resourceType=\"event\"><ResourceIds><ResourceId>crid://telenet.be/996d88df-9327-4cfd-84b8-0f61648f42ad,imi:0010000000033E93</ResourceId></ResourceIds><Options><Option type=\"props\">durationinseconds,availabilitystart,availabilityend</Option></Options><SubQueries><SubQuery relationName=\"titles\"><Options><Option type=\"props\">episodename,isadult,name,ordinal,pictures,ratings,minimumage,longsynopsis</Option></Options><SubQueries><SubQuery relationName=\"seriescollection\"><Options><Option type=\"props\">relationordinal,type,name</Option></Options><SubQueries><SubQuery relationName=\"parentseriescollection\"><Options><Option type=\"props\">relationordinal,type,name</Option></Options></SubQuery></SubQueries></SubQuery></SubQueries></SubQuery></SubQueries></ResourcesQuery></Request>"
            }
        )

    def test_traxis_service_log_method_duration(self):
        """'Executing method ... took ... ms' lines additionally yield method and duration fields."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] Executing method 'GetEntitlementForProduct' took '17' ms"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "request-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "obo-customer-id": "58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl",
                "method": "GetEntitlementForProduct",
                "duration": "17",
                "message": "[10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] Executing method 'GetEntitlementForProduct' took '17' ms"
            }
        )

    def test_traxis_service_log_method_invoked(self):
        """'Method ... invoked with parameters' lines yield the method name and each named parameter."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] Method 'GetOffers' invoked with parameters: identity = Eventis.Traxis.BusinessLogicLayer.Identity, productId = crid://eventis.nl/00000000-0000-1000-0008-000100000000, twoLetterIsoLanguageCode = en"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "request-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "obo-customer-id": "58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl",
                "method": "GetOffers",
                "identity": "Eventis.Traxis.BusinessLogicLayer.Identity",
                "productId": "crid://eventis.nl/00000000-0000-1000-0008-000100000000",
                "message": "[10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] Method 'GetOffers' invoked with parameters: identity = Eventis.Traxis.BusinessLogicLayer.Identity, productId = crid://eventis.nl/00000000-0000-1000-0008-000100000000, twoLetterIsoLanguageCode = en"
            }
        )

    def test_traxis_service_log_query_metrics_with_requester_id_and_customer_id(self):
        """QueryMetrics lines with both RequestId and CustomerId produce numeric metric fields."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "request-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "obo-customer-id": "58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl",
                "response_time_in_milliseconds": 3,
                "queue_time_in_milliseconds": 0,
                "response_length_in_bytes": 2195,
                "cassandra_request_count": 0,
                "external_request_count": 0,
                "resource_evaluation_count": 1,
                # Note: the raw metrics blob keeps its leading space.
                "query_metrics": " ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1",
                "message": "[10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            }
        )

    def test_traxis_service_log_query_metrics_with_requester_id_and_customer_id_in_revert_order(self):
        """Same as above but with CustomerId appearing before RequestId in the raw line."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "request-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "obo-customer-id": "58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl",
                "response_time_in_milliseconds": 3,
                "queue_time_in_milliseconds": 0,
                "response_length_in_bytes": 2195,
                "cassandra_request_count": 0,
                "external_request_count": 0,
                "resource_evaluation_count": 1,
                "query_metrics": " ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1",
                "message": "[10.64.13.180:39428] [CustomerId = 58a88a40-4d12-11e7-85f5-e5a72ae6734d_nl] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            }
        )

    def test_traxis_service_log_query_metrics_with_only_query_metrics(self):
        """QueryMetrics lines without RequestId/CustomerId omit those fields."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "response_time_in_milliseconds": 3,
                "queue_time_in_milliseconds": 0,
                "response_length_in_bytes": 2195,
                "cassandra_request_count": 0,
                "external_request_count": 0,
                "resource_evaluation_count": 1,
                "query_metrics": " ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1",
                "message": "[10.64.13.180:39428] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            }
        )

    def test_traxis_service_log_query_metrics_with_request_id(self):
        """QueryMetrics lines with only a RequestId tag."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "request-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "response_time_in_milliseconds": 3,
                "queue_time_in_milliseconds": 0,
                "response_length_in_bytes": 2195,
                "cassandra_request_count": 0,
                "external_request_count": 0,
                "resource_evaluation_count": 1,
                "query_metrics": " ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1",
                "message": "[10.64.13.180:39428] [RequestId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            }
        )

    def test_traxis_service_log_query_metrics_with_customer_id(self):
        """QueryMetrics lines with only a CustomerId tag."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:33,468 DEBUG [HTTP worker thread 15] EntitlementManager - [10.64.13.180:39428] [CustomerId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 33, 468000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 15",
                "component": "EntitlementManager",
                "ip": "10.64.13.180:39428",
                "obo-customer-id": "f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab",
                "response_time_in_milliseconds": 3,
                "queue_time_in_milliseconds": 0,
                "response_length_in_bytes": 2195,
                "cassandra_request_count": 0,
                "external_request_count": 0,
                "resource_evaluation_count": 1,
                "query_metrics": " ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1",
                "message": "[10.64.13.180:39428] [CustomerId = f14d79a5-357e-4b6f-bcb7-ed2b00fd63ab] QueryMetrics: ResponseTimeInMilliseconds = 3, QueueTimeInMilliseconds = 0, ResponseLengthInBytes = 2195, CassandraRequestCount = 0, CassandraRequestTotalResponseTimeInMicroseconds = , CassandraRequestAverageResponseTimeInMicroseconds = , ExternalRequestCount = 0, ExternalRequestTotalResponseTimeInMilliseconds = , ResourceEvaluationCount = 1"
            }
        )

    def test_traxis_service_log_cannot_purchase_product(self):
        """'Cannot purchase products' lines yield the offending productId."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2017-06-29 16:35:25,640 DEBUG [HTTP worker thread 2] BaseEntitlementManager - [10.64.13.180:39376] [RequestId = 0cc3c8cf-f3b3-4660-9a8c-54e5461106c9] [CustomerId = be73f580-5cc6-11e7-acce-916590705404_nl] Cannot purchase products of type 'Subscription': subscription purchase is not enabled. CustomerId 'be73f580-5cc6-11e7-acce-916590705404_nl', productId 'crid://eventis.nl/00000000-0000-1000-0008-000100000001'"
            },
            {
                "@timestamp": datetime(2017, 6, 29, 16, 35, 25, 640000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "HTTP worker thread 2",
                "component": "BaseEntitlementManager",
                "ip": "10.64.13.180:39376",
                "request-id": "0cc3c8cf-f3b3-4660-9a8c-54e5461106c9",
                "obo-customer-id": "be73f580-5cc6-11e7-acce-916590705404_nl",
                "productId": "crid://eventis.nl/00000000-0000-1000-0008-000100000001",
                "message": "[10.64.13.180:39376] [RequestId = 0cc3c8cf-f3b3-4660-9a8c-54e5461106c9] [CustomerId = be73f580-5cc6-11e7-acce-916590705404_nl] Cannot purchase products of type 'Subscription': subscription purchase is not enabled. CustomerId 'be73f580-5cc6-11e7-acce-916590705404_nl', productId 'crid://eventis.nl/00000000-0000-1000-0008-000100000001'"
            }
        )

    def test_traxis_service_log_x_req_id(self):
        """Multiline WARN payloads with embedded HTTP headers yield the x-request-id header value."""
        self.assert_parsing(
            {
                "source": "TraxisService.log",
                "message": "2018-04-10 08:52:34,478 WARN [HTTP worker thread 17] ClassificationSchemeLookupTable - [172.23.41.87, 10.95.97.62] [RequestId = aa0b54f9-8937-4329-9150-0db6343b0cbc] Genre 'urn:tva:metadata:cs:UPCEventGenreCS:2009:46' is not known\nOriginal message from 10.95.96.116:49790\nMethod = Post\nUri = http://traxis-web/traxis/web\nHeaders = \n x-request-id: a877520b711186cad77077e328fadd82\n x-application-name: hollow-producer\n X-B3-TraceId: 11e192c37af66bb4\n X-B3-SpanId: 11e192c37af66bb4\n X-B3-Sampled: 0\n X-Forwarded-For: 172.23.41.87, 10.95.97.62\n Content-Length: 1391\n Content-Type: application/xml\n Accept-Encoding: gzip\n Host: traxis-web\n User-Agent: hollow-producer/1.11.01 TentacleClient/5.15.1 Jersey/2.25.1\nBody = \u003c?xml version=\"1.0\" encoding=\"utf-8\"?\u003e\n\u003cRequest xmlns=\"urn:eventis:traxisweb:1.0\"\u003e\n \u003cParameters\u003e\n \u003cParameter name=\"Language\"\u003enl\u003c/Parameter\u003e\n \u003c/Parameters\u003e\n \u003cRootRelationQuery relationName=\"Titles\"\u003e\n \u003cOptions\u003e\n \u003cOption type=\"Paging\"\u003e35000,5000,rc\u003c/Option\u003e\n \u003cOption type=\"Props\"\u003e\n Name,\n Pictures,\n EpisodeName,\n SeriesCollection,\n MinimumAge,\n DurationInSeconds,\n SortName,\n ShortSynopsis,\n LongSynopsis,\n ContentProviderId,\n Categories,\n IsAdult,\n ProductionDate,\n Credits,\n Genres,\n AllGenres,\n ActorsCharacters,\n DirectorNames,\n IsTstv,\n ProductionLocations,\n StreamingPopularityDay,\n StreamingPopularityWeek,\n StreamingPopularityMonth\n \u003c/Option\u003e\n \u003c/Options\u003e\n \u003cSubQueries\u003e\n \u003cSubQuery relationName=\"SeriesCollection\"\u003e\n \u003cOptions\u003e\n \u003cOption type=\"props\"\u003e\n RelationOrdinal\n \u003c/Option\u003e\n \u003c/Options\u003e\n \u003c/SubQuery\u003e\n \u003c/SubQueries\u003e\n \u003c/RootRelationQuery\u003e\n\u003c/Request\u003e\n"
            },
            {
                "@timestamp": datetime(2018, 4, 10, 8, 52, 34, 478000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "thread_name": "HTTP worker thread 17",
                "component": "ClassificationSchemeLookupTable",
                "ip": "172.23.41.87, 10.95.97.62",
                "request-id": "aa0b54f9-8937-4329-9150-0db6343b0cbc",
                "x-request-id": "a877520b711186cad77077e328fadd82",
                "message": "[172.23.41.87, 10.95.97.62] [RequestId = aa0b54f9-8937-4329-9150-0db6343b0cbc] Genre 'urn:tva:metadata:cs:UPCEventGenreCS:2009:46' is not known\nOriginal message from 10.95.96.116:49790\nMethod = Post\nUri = http://traxis-web/traxis/web\nHeaders = \n x-request-id: a877520b711186cad77077e328fadd82\n x-application-name: hollow-producer\n X-B3-TraceId: 11e192c37af66bb4\n X-B3-SpanId: 11e192c37af66bb4\n X-B3-Sampled: 0\n X-Forwarded-For: 172.23.41.87, 10.95.97.62\n Content-Length: 1391\n Content-Type: application/xml\n Accept-Encoding: gzip\n Host: traxis-web\n User-Agent: hollow-producer/1.11.01 TentacleClient/5.15.1 Jersey/2.25.1\nBody = \u003c?xml version=\"1.0\" encoding=\"utf-8\"?\u003e\n\u003cRequest xmlns=\"urn:eventis:traxisweb:1.0\"\u003e\n \u003cParameters\u003e\n \u003cParameter name=\"Language\"\u003enl\u003c/Parameter\u003e\n \u003c/Parameters\u003e\n \u003cRootRelationQuery relationName=\"Titles\"\u003e\n \u003cOptions\u003e\n \u003cOption type=\"Paging\"\u003e35000,5000,rc\u003c/Option\u003e\n \u003cOption type=\"Props\"\u003e\n Name,\n Pictures,\n EpisodeName,\n SeriesCollection,\n MinimumAge,\n DurationInSeconds,\n SortName,\n ShortSynopsis,\n LongSynopsis,\n ContentProviderId,\n Categories,\n IsAdult,\n ProductionDate,\n Credits,\n Genres,\n AllGenres,\n ActorsCharacters,\n DirectorNames,\n IsTstv,\n ProductionLocations,\n StreamingPopularityDay,\n StreamingPopularityWeek,\n StreamingPopularityMonth\n \u003c/Option\u003e\n \u003c/Options\u003e\n \u003cSubQueries\u003e\n \u003cSubQuery relationName=\"SeriesCollection\"\u003e\n \u003cOptions\u003e\n \u003cOption type=\"props\"\u003e\n RelationOrdinal\n \u003c/Option\u003e\n \u003c/Options\u003e\n \u003c/SubQuery\u003e\n \u003c/SubQueries\u003e\n \u003c/RootRelationQuery\u003e\n\u003c/Request\u003e\n"
            }
        )

    def test_traxis_service_error_log(self):
        """TraxisServiceError.log lines parse like the main service log."""
        self.assert_parsing(
            {
                "source": "TraxisServiceError.log",
                "message": "2017-11-07 19:30:19,669 ERROR [169] MachineTimeCheck - [Task = Eventis.Traxis.Service.Ntp.MachineTimeCheck] Eventis.Traxis.Service.ServiceException+NetworkTimeCheckError: NetworkTime error: Time difference between this machine and machine '10.95.97.60' is '17780' ms. This exceeds the configured threshold of '2000' ms"
            },
            {
                # Fixed: the day was written as the leading-zero literal `07`
                # (an octal literal in Python 2, a SyntaxError in Python 3);
                # the value is unchanged.
                "@timestamp": datetime(2017, 11, 7, 19, 30, 19, 669000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "thread_name": "169",
                "component": "MachineTimeCheck",
                "message": "[Task = Eventis.Traxis.Service.Ntp.MachineTimeCheck] Eventis.Traxis.Service.ServiceException+NetworkTimeCheckError: NetworkTime error: Time difference between this machine and machine '10.95.97.60' is '17780' ms. This exceeds the configured threshold of '2000' ms"
            }
        )

    def test_traxis_service_scheduler_log(self):
        """TraxisServiceDistributedScheduler.log lines parse like the main service log."""
        self.assert_parsing(
            {
                "source": "TraxisServiceDistributedScheduler.log",
                "message": "2017-09-29 14:41:58,832 DEBUG [71] DistributedScheduler - [Task = DistributedScheduler.Master] Machines in up state: BE-W-P-OBO00170, BE-W-P-OBO00173, BE-W-P-OBO00174"
            },
            {
                "@timestamp": datetime(2017, 9, 29, 14, 41, 58, 832000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "71",
                "component": "DistributedScheduler",
                "message": "[Task = DistributedScheduler.Master] Machines in up state: BE-W-P-OBO00170, BE-W-P-OBO00173, BE-W-P-OBO00174"
            }
        )

    def test_traxis_service_management_log(self):
        """TraxisServiceLogManagement.log lines yield method/duration/ip/ids like the service log."""
        self.assert_parsing(
            {
                "source": "TraxisServiceLogManagement.log",
                "message": "2017-11-14 14:54:35,666 VERBOSE [HTTP worker thread 13] LogManager - [81.82.50.176] [RequestId = 590b040e-a8ae-47ee-969c-58a213999c09] [CustomerId = c9bde815-d03b-46ef-abfe-0b2802116338_be] Executing method 'get_SessionLogger' took '0' ms"
            },
            {
                "@timestamp": datetime(2017, 11, 14, 14, 54, 35, 666000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "VERBOSE",
                "thread_name": "HTTP worker thread 13",
                "component": "LogManager",
                "message": "[81.82.50.176] [RequestId = 590b040e-a8ae-47ee-969c-58a213999c09] [CustomerId = c9bde815-d03b-46ef-abfe-0b2802116338_be] Executing method 'get_SessionLogger' took '0' ms",
                "duration": "0",
                "ip": "81.82.50.176",
                "method": "get_SessionLogger",
                "obo-customer-id": "c9bde815-d03b-46ef-abfe-0b2802116338_be",
                "request-id": "590b040e-a8ae-47ee-969c-58a213999c09"
            }
        )

    def test_traxis_for_extra_spaces(self):
        """Thread names containing dots (scheduler task names) are still parsed correctly."""
        self.assert_parsing(
            {
                "source": "TraxisServiceLogManagement.log",
                "message": "2018-01-31 09:20:41,979 INFO [ResponseCache.Refresh] ResponseCache - [Task = ResponseCache.Refresh] Refreshing '482' queries took '55224' ms"
            },
            {
                "@timestamp": datetime(2018, 1, 31, 9, 20, 41, 979000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "thread_name": "ResponseCache.Refresh",
                "component": "ResponseCache",
                "message": "[Task = ResponseCache.Refresh] Refreshing '482' queries took '55224' ms"
            }
        )
#!/usr/bin/python2 from integrator.modules.tablinker.tablinker import TabLinker from util.configuration import Configuration import logging if __name__ == '__main__': # Load the configuration file config = Configuration('config.ini') # Configure the logger root_logger = logging.getLogger('') root_logger.setLevel(logging.DEBUG if config.verbose() else logging.INFO) logFormat = '%(asctime)s %(name)-18s %(levelname)-8s %(message)s' ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(logFormat)) root_logger.addHandler(ch) #dataset = config.getURI('cedar',"BRT_1889_02_T1-S0") #dataset = config.getURI('cedar',"VT_1869_01_H1-S0") #dataset = config.getURI('cedar','VT_1879_01_H1-S0') #dataset = config.getURI('cedar','VT_1859_01_H1-S6') filename = 'VT_1899_07_H1.ods' # Test tabLinker = TabLinker(config.get_path('source-data') + '/' + filename, "/tmp/data.ttl", processAnnotations=True) tabLinker.set_target_namespace(config.get_namespace('data')) tabLinker.set_compress(config.isCompress()) tabLinker.doLink()
class UServicesParsingTestCase(BaseMultipleMessageParsingTestCase): event_creators = create_event_creators( Configuration(dict={"timezone": { "name": "Europe/Amsterdam" }})) def test_uservices_log(self): self.assert_parsing( { "message": json.dumps({ "app": "gateway", "x-request-id": "2bb963ae-d492-41a2-b8f8-a94d8810248a", "stack": "gateway-ch", "offset": 87181738, "log": "", "json_error": "Key 'log' not found", "input_type": "log", "time_local": "21/Mar/2018:14:16:05 +0100", "source": "/app/nginx/access.log", "x-forwarded-for": "172.23.41.84", "message": "172.16.96.133 - - [21/Mar/2018:14:16:05 +0100] \"GET /reng/RE/REController.do?contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true HTTP/1.1\" 200 636", "type": "uservices_log_in", "x-dev": "-", "@timestamp": "2018-03-21T13:16:06.753Z", "x-auth-id": "-", "beat": { "hostname": "lg-l-p-obo00579", "name": "lg-l-p-obo00579", "version": "5.6.5" }, "host": "lg-l-p-obo00579", "@version": "1", "http": { "referer": "-", "request": "/reng/RE/REController.do?contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true", "method": "GET", "local_port": "80", "useragent": "discovery-service/0.25.1 TentacleClient/5.10.0 Jersey/2.25.1", "proxy_to": "reng:80", "duration": 0.018, "protocol": "HTTP/1.1", "urlpath": "/reng/RE/REController.do", "urlquery": 
"contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true", "bytes": 636, "clientip": "172.16.96.133", "domain": "gateway", "status": 200 }, "upstream_response_time": "0.018", "x-cus": "-" }) }, { "app": "gateway", "x-request-id": "2bb963ae-d492-41a2-b8f8-a94d8810248a", "stack": "gateway-ch", "offset": 87181738, "log": "", "json_error": "Key 'log' not found", "input_type": "log", "time_local": "21/Mar/2018:14:16:05 +0100", "source": "/app/nginx/access.log", "x-forwarded-for": "172.23.41.84", "message": "172.16.96.133 - - [21/Mar/2018:14:16:05 +0100] \"GET /reng/RE/REController.do?contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true HTTP/1.1\" 200 636", "type": "uservices_log_in", "x-dev": "-", "@timestamp": datetime(2018, 3, 21, 13, 16, 6, 753000).replace(tzinfo=timezones["Europe/Amsterdam"]), "x-auth-id": "-", "beat": { "hostname": "lg-l-p-obo00579", "name": "lg-l-p-obo00579", "version": "5.6.5" }, "host": "lg-l-p-obo00579", "@version": "1", "http_referer": "-", "http_request": "/reng/RE/REController.do?contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true", "http_method": "GET", 
"http_local_port": "80", "http_useragent": "discovery-service/0.25.1 TentacleClient/5.10.0 Jersey/2.25.1", "http_proxy_to": "reng:80", "http_duration": 0.018, "http_protocol": "HTTP/1.1", "http_urlpath": "/reng/RE/REController.do", "http_urlquery": "contentSourceId=1&clientType=399&method=lgiAdaptiveSearch&subscriberId=6b10a120-ce10-11e7-8a8a-5fa419531185_ch%23MasterProfile&term=News&lgiContentItemInstanceId=%5B1%5Dimi%3A00100000001950EC&contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Ffcdf57b1-1cc4-4e13-ad34-c348b26315c7&startResults=0&maxResults=1&applyMarketingBias=true&filterVodAvailableNow=true", "content_source_id": "1", "client_type": "399", "method": "lgiAdaptiveSearch", "subscriber_id": "6b10a120-ce10-11e7-8a8a-5fa419531185_ch#MasterProfile", "term": "News", "lgi_content_item_instance_id": "[1]imi:00100000001950EC", "content_item_id": "[2]crid://telenet.be/fcdf57b1-1cc4-4e13-ad34-c348b26315c7", "start_results": "0", "max_results": "1", "apply_marketing_bias": "true", "filter_vod_available_now": "true", "http_bytes": 636, "http_clientip": "172.16.96.133", "http_domain": "gateway", "http_status": 200, "upstream_response_time": "0.018", "x-cus": "-" }) def test_uservices_log_parse_and_clean_subscriber_id(self): self.assert_parsing( { "message": json.dumps({ "http": { "urlquery": "subscriberId=1e1e8640-17b0-11e8-93dc-e71e3a262bec_de%23MasterProfile", }, }) }, { "http_urlquery": "subscriberId=1e1e8640-17b0-11e8-93dc-e71e3a262bec_de%23MasterProfile", "subscriber_id": "1e1e8640-17b0-11e8-93dc-e71e3a262bec_de#MasterProfile", }) def test_uservices_log_parse_and_clean_content_item_id_with_telenet_be( self): self.assert_parsing( { "message": json.dumps({ "http": { "urlquery": "contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Fdc8f839f-dd41-49bf-842f-e6208e08ee02", } }) }, { "http_urlquery": "contentItemId=%5B2%5Dcrid%3A%2F%2Ftelenet.be%2Fdc8f839f-dd41-49bf-842f-e6208e08ee02", "content_item_id": "[2]crid://telenet.be/dc8f839f-dd41-49bf-842f-e6208e08ee02", }) def 
test_uservices_log_parse_content_item_id_without_telenet_be(self): self.assert_parsing( { "message": json.dumps({ "http": { "urlquery": "contentItemId=crid%3A%2F%2Fbds.tv%2F134877645", }, }) }, { "http_urlquery": "contentItemId=crid%3A%2F%2Fbds.tv%2F134877645", "content_item_id": "crid://bds.tv/134877645", }) def test_uservices_log_parse_and_clean_lgi_content_item_instance_id(self): self.assert_parsing( { "message": json.dumps({ "http": { "urlquery": "lgiContentItemInstanceId=%5B1%5Dimi%3A0010000000185C2C", } }) }, { "http_urlquery": "lgiContentItemInstanceId=%5B1%5Dimi%3A0010000000185C2C", "lgi_content_item_instance_id": "[1]imi:0010000000185C2C", }) def test_extract_bookings_api_method(self): self.assert_parsing( { "message": json.dumps({ "app": "recording-service", "header": { "x-original-uri": "/recording-service/customers/1a2a5370-dc2d-11e7-a33c-8ffe245e867f_nl/bookings?language=nl&limit=2147483647&isAdult=false", } }) }, { "app": "recording-service", "header_x-original-uri": "/recording-service/customers/1a2a5370-dc2d-11e7-a33c-8ffe245e867f_nl/bookings?language=nl&limit=2147483647&isAdult=false", "api_method": "bookings" }) def test_extract_recordings_api_method(self): self.assert_parsing( { "message": json.dumps({ "app": "recording-service", "header": { "x-original-uri": "/recording-service/customers/93071743-eb84-4f01-9d92-53d8ad503789_be/recordings/contextual?language=en", } }) }, { "app": "recording-service", "header_x-original-uri": "/recording-service/customers/93071743-eb84-4f01-9d92-53d8ad503789_be/recordings/contextual?language=en", "api_method": "recordings" }) def test_extract_history_api_method(self): self.assert_parsing( { "message": json.dumps({ "app": "purchase-service", "header": { "x-original-uri": "/purchase-service/customers/c6ffb300-f529-11e7-93dc-e71e3a262bec_de/history", } }) }, { "app": "purchase-service", "header_x-original-uri": "/purchase-service/customers/c6ffb300-f529-11e7-93dc-e71e3a262bec_de/history", "api_method": "history" }) 
def test_extract_entitlements_api_method(self):
    """The "entitlements" path segment of x-original-uri is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "purchase-service",
                "header": {
                    "x-original-uri": "/purchase-service/customers/c6ffb300-f529-11e7-93dc-e71e3a262bec_de/entitlements/3C36E4-EOSSTB-003398520902",
                }
            })
        },
        {
            "app": "purchase-service",
            "header_x-original-uri": "/purchase-service/customers/c6ffb300-f529-11e7-93dc-e71e3a262bec_de/entitlements/3C36E4-EOSSTB-003398520902",
            "api_method": "entitlements"
        })

def test_extract_contextualvod_api_method(self):
    """The "contextualvod" segment of a vod-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "vod-service",
                "header": {
                    "x-original-uri": "/vod-service/v2/contextualvod/omw_playmore_nl?country=be&language=nl&profileId=86e4425d-9405-4a60-9b07-d3b105d9c27e_be~~23MasterProfile&optIn=true",
                }
            })
        },
        {
            "app": "vod-service",
            "header_x-original-uri": "/vod-service/v2/contextualvod/omw_playmore_nl?country=be&language=nl&profileId=86e4425d-9405-4a60-9b07-d3b105d9c27e_be~~23MasterProfile&optIn=true",
            "api_method": "contextualvod"
        })

def test_extract_detailscreen_api_method(self):
    """The "detailscreen" segment of a vod-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "vod-service",
                "header": {
                    "x-original-uri": "/vod-service/v2/detailscreen/crid:~~2F~~2Ftelenet.be~~2F486e1365-35fd-46b1-ac04-d42e173e7dfa?country=be&language=nl&profileId=12c41366-0a93-4e54-b1b6-efcf22c132b9_be~~23MasterProfile&t"
                }
            })
        },
        {
            "app": "vod-service",
            "header_x-original-uri": "/vod-service/v2/detailscreen/crid:~~2F~~2Ftelenet.be~~2F486e1365-35fd-46b1-ac04-d42e173e7dfa?country=be&language=nl&profileId=12c41366-0a93-4e54-b1b6-efcf22c132b9_be~~23MasterProfile&t",
            "api_method": "detailscreen"
        })

def test_extract_gridscreen_api_method(self):
    """The "gridscreen" segment of a vod-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "vod-service",
                "header": {
                    "x-original-uri": "/vod-service/v2/gridscreen/omw_hzn4_vod/crid:~~2F~~2Fschange.com~~2F99cd6cce-f330-4dda-a8cb-190711ffb735?country=de&language=en&sortType=ordinal&sortDirection=as"
                }
            })
        },
        {
            "app": "vod-service",
            "header_x-original-uri": "/vod-service/v2/gridscreen/omw_hzn4_vod/crid:~~2F~~2Fschange.com~~2F99cd6cce-f330-4dda-a8cb-190711ffb735?country=de&language=en&sortType=ordinal&sortDirection=as",
            "api_method": "gridscreen"
        })

def test_extract_learn_actions_api_method(self):
    """The "learn-actions" segment of a discovery-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "discovery-service",
                "header": {
                    "x-original-uri": "/discovery-service/v1/learn-actions"
                }
            })
        },
        {
            "app": "discovery-service",
            "header_x-original-uri": "/discovery-service/v1/learn-actions",
            "api_method": "learn-actions"
        })

def test_extract_search_api_method(self):
    """The "search" segment of a discovery-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "discovery-service",
                "header": {
                    "x-original-uri": "/discovery-service/v1/search/contents?clientType=399&contentSourceId=1&searchTerm=News&startResults=0&maxResults=1&includeNotEntitled=true&profileId=d53facd0-cab"
                }
            })
        },
        {
            "app": "discovery-service",
            "header_x-original-uri": "/discovery-service/v1/search/contents?clientType=399&contentSourceId=1&searchTerm=News&startResults=0&maxResults=1&includeNotEntitled=true&profileId=d53facd0-cab",
            "api_method": "search"
        })

def test_extract_recommendations_api_method(self):
    """The "recommendations" segment of a discovery-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "discovery-service",
                "header": {
                    "x-original-uri": "/discovery-service/v1/recommendations/more-like-this?clientType=305&contentSourceId=1&contentSourceId=2&profileId=12c41366-0a93-4e54-b1b6-efcf22c132b9_be~~23Mast"
                }
            })
        },
        {
            "app": "discovery-service",
            "header_x-original-uri": "/discovery-service/v1/recommendations/more-like-this?clientType=305&contentSourceId=1&contentSourceId=2&profileId=12c41366-0a93-4e54-b1b6-efcf22c132b9_be~~23Mast",
            "api_method": "recommendations"
        })

def test_extract_channels_api_method(self):
    """The "channels" segment of a session-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "session-service",
                "header": {
                    "x-original-uri": "/session-service/session/channels/Nederland_1_HD?startTime=2018-01-16T15:07:39Z"
                }
            })
        },
        {
            "app": "session-service",
            "header_x-original-uri": "/session-service/session/channels/Nederland_1_HD?startTime=2018-01-16T15:07:39Z",
            "api_method": "channels"
        })

def test_extract_cpes_api_method(self):
    """The "cpes" segment of a session-service URI is extracted as api_method."""
    self.assert_parsing(
        {
            "message": json.dumps({
                "app": "session-service",
                "header": {
                    "x-original-uri": "/session-service/session/cpes/3C36E4-EOSSTB-003356297204/replay/events/crid:~~2F~~2Fbds.tv~~2F19867244,imi:00100000000E8423 => extract cpes"
                }
            })
        },
        {
            "app": "session-service",
            "header_x-original-uri": "/session-service/session/cpes/3C36E4-EOSSTB-003356297204/replay/events/crid:~~2F~~2Fbds.tv~~2F19867244,imi:00100000000E8423 => extract cpes",
            "api_method": "cpes"
        })
#!/usr/bin/python2
from modules.rules.rulesmaker import RuleMaker
from util.configuration import Configuration

import logging

if __name__ == '__main__':
    # Read the CEDAR configuration.
    config = Configuration('config-cedar.ini')

    # Root logging goes both to the console and to rules.log; level follows
    # the configuration's verbosity flag.
    log_format = '%(asctime)s %(name)-18s %(levelname)-8s %(message)s'
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.DEBUG if config.verbose() else logging.INFO)
    for handler in (logging.StreamHandler(), logging.FileHandler('rules.log')):
        handler.setFormatter(logging.Formatter(log_format))
        root_logger.addHandler(handler)

    # Dataset under test.
    dataset = 'VT_1899_07_H1-S0'

    # Build the rules for the dataset, then load its mappings and headers.
    rule_maker = RuleMaker(config.get_SPARQL(), dataset, "/tmp/test.ttl")
    rule_maker.loadMappings(config.get_path('mappings'))
    rule_maker.loadHeaders(config.get_graph_name('raw-data'))
import sys
import os
import os.path

# Make the sibling ``integrator`` directory importable before loading from it.
sys.path.insert(
    0,
    os.path.abspath(os.path.join(os.path.dirname(__file__), '../integrator')))

from util.configuration import Configuration
from integrat import Integrator

# Create the logger
import logging
log = logging.getLogger(__name__)

if __name__ == '__main__':
    # Read the configuration file.
    config = Configuration('config.ini')

    # Mirror log records to the console and to integrator.log (truncated
    # at start-up); level follows the configuration's verbosity flag.
    formatter = logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.DEBUG if config.verbose() else logging.INFO)
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    root_logger.addHandler(console_handler)
    file_handler = logging.FileHandler('integrator.log', mode='w')
    file_handler.setFormatter(formatter)
    root_logger.addHandler(file_handler)

    # Run the integrator against the loaded configuration.
    integrator = Integrator(config)
class CdnParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for CDN access-log lines (tab-separated fields)."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_cdn(self):
        """A full tab-separated access-log line is split into its named fields."""
        self.assert_parsing(
            {
                "message": "ac4.vt2ind1d1.cdn\t2018-03-21\t04:53:39\t0.001\t52.67.236.131\t34880\texternal\tg\t-\t-\tGET\thttp://upc-cl-37.live.horizon.tv/ss/Paramount.isml/QualityLevels(96000)/Fragments(audio101_spa=184186112755222)\t1.1\t\"Apache-HttpClient/4.5.3 (Java/1.8.0_141)\"\t-\t-\t-\t200\tCACHE_MEM_HIT\t26014\t25341\t0\t186.156.250.12\t0\t-\t126\t0\t0\t-\t-\tWP:7e00000000000000\t-\t4ae1bc\tu--"
            },
            {
                "s_dns": "ac4.vt2ind1d1.cdn",
                "@timestamp": datetime(2018, 3, 21, 4, 53, 39).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "x_duration": "0.001",
                "c_ip": "52.67.236.131",
                "c_port": "34880",
                "c_vx_zone": "external",
                "c_vx_gloc": "g",
                "unknown_field1": "-",
                "unknown_field2": "-",
                "cs_method": "GET",
                "cs_uri": "http://upc-cl-37.live.horizon.tv/ss/Paramount.isml/QualityLevels(96000)/Fragments(audio101_spa=184186112755222)",
                "cs_version": "1.1",
                "cs_user_agent": "\"Apache-HttpClient/4.5.3 (Java/1.8.0_141)\"",
                "cs_refer": "-",
                "cs_cookie": "-",
                "cs_range": "-",
                "cs_status": "200",
                "s_cache_status": "CACHE_MEM_HIT",
                "sc_bytes": "26014",
                "sc_stream_bytes": "25341",
                "sc_dscp": "0",
                "s_ip": "186.156.250.12",
                "s_vx_rate": "0",
                "s_vx_rate_status": "-",
                "s_vx_serial": "126",
                "rs_stream_bytes": "0",
                "rs_bytes": "0",
                "cs_vx_token": "-",
                "sc_vx_download_rate": "-",
                "x_protohash": "WP:7e00000000000000",
                "additional_headers": "-",
                "unknown_field3": "4ae1bc",
                "unknown_field4": "u--",
            })
from util.configuration import Configuration
from modules.reporting.stats import StatsGenerator

import logging
log = logging.getLogger(__name__)

if __name__ == '__main__':
    # Console-only logging at INFO level.
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.INFO)
    root_logger.addHandler(handler)

    # Load the configuration file.
    config = Configuration('/home/cgueret/Code/CEDAR/DataDump-mini-vt/config.ini')

    # Build the statistics generator; the final flag enables the cache
    # to speed up testing.
    stats_generator = StatsGenerator(config.get_SPARQL(),
                                     config.get_graph_name('raw-data'),
                                     config.get_graph_name('rules'),
                                     config.get_graph_name('release'),
                                     True)

    # Go !
    stats_generator.go('/tmp/stats.html')
class StagisMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for STAGIS pipe-delimited log messages.

    Covers the general, error, interface, core-commit and WCF topics.
    Several inputs deliberately carry trailing whitespace to verify the
    parser trims the message field.
    """

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_general(self):
        """General topic: known message shapes yield task-specific fields;
        near-miss messages (substring match, wrong regex) keep only the
        common fields."""
        test_cases = [
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | TVA Delta Server respond with status code 'OK' in '1576' ms"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "task": "TVA Delta Server response",
                "duration": 1576,
                "status": "OK",
                "message": "TVA Delta Server respond with status code 'OK' in '1576' ms"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | TVA Delta Request Starting => http://BE-W-P-OBO00159:9000/TVAMain?SyncAfter=15132539107630000, sequence number: 15132539107630000"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "task": "TVA Delta Server request",
                "sequence_number": "15132539107630000",
                "message": "TVA Delta Request Starting => http://BE-W-P-OBO00159:9000/TVAMain?SyncAfter=15132539107630000, sequence number: 15132539107630000"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | Received Delta Server Notification Sequence Number: 15132539107630000, Last Sequence Number: 15132539107630000, LogicalServerId: 'PRODIS_170906160628'"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "task": "Notification",
                "sequence_number": "15132539107630000",
                "message": "Received Delta Server Notification Sequence Number: 15132539107630000, Last Sequence Number: 15132539107630000, LogicalServerId: 'PRODIS_170906160628'"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | [Model] Model state after committing transaction [Sequence number: 15132539721087218 - Timestamp: 14/12/2017 12:19:32 - Number: 63861022] Entities: 447464 - Links: 1394780 - Channels: 350 - Events: 242420 - Programs: 132061 - Groups: 59377 - OnDemandPrograms: 7993 - BroadcastEvents: 5263"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "task": "Committing Transaction",
                "sequence_number": "15132539721087218",
                "number": "63861022",
                "entities": 447464,
                "links": 1394780,
                "channels": 350,
                "events": 242420,
                "programs": 132061,
                "groups": 59377,
                "on_demand_programs": 7993,
                "broadcast_events": 5263,
                "message": "[Model] Model state after committing transaction [Sequence number: 15132539721087218 - Timestamp: 14/12/2017 12:19:32 - Number: 63861022] Entities: 447464 - Links: 1394780 - Channels: 350 - Events: 242420 - Programs: 132061 - Groups: 59377 - OnDemandPrograms: 7993 - BroadcastEvents: 5263"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | Just some message, no substring match"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "Just some message, no substring match"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | TVA Delta Server respond - substring match, but wrong regex"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "TVA Delta Server respond - substring match, but wrong regex"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | TVA Delta Request Starting - substring match, but wrong regex"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "TVA Delta Request Starting - substring match, but wrong regex"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | Received Delta Server Notification - substring match, but wrong regex"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "Received Delta Server Notification - substring match, but wrong regex"
            }),
            ({
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | Catalog Ingester | 7ce7119-4504-4908-8041-0fb10cbe26b6 | 369 | EntityCounter | Model state after committing transaction - substring match, but wrong regex"
            }, {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "Catalog Ingester",
                "causality_id": "7ce7119-4504-4908-8041-0fb10cbe26b6",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "Model state after committing transaction - substring match, but wrong regex"
            })
        ]
        for test_message, parsed_message in test_cases:
            self.assert_parsing(test_message, parsed_message)

    def test_error(self):
        """Error topic: standard six-field pipe format is parsed."""
        self.assert_parsing(
            {
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_log_err_v1",
                "message": "2017-10-30 10:01:05,169 | ERROR | Stagis | cfdd16c-ca17-4ab7-a2f6-5b4b4b966d77 | DefaultQuartzScheduler_Worker-1 | DataWorkflowCore | Starting the Core Provider failed."
            },
            {
                "@timestamp": datetime(2017, 10, 30, 10, 1, 5, 169000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "instance_name": "Stagis",
                "causality_id": "cfdd16c-ca17-4ab7-a2f6-5b4b4b966d77",
                "thread_id": "DefaultQuartzScheduler_Worker-1",
                "class_name": "DataWorkflowCore",
                "message": "Starting the Core Provider failed."
            })

    def test_interface(self):
        """Interface topic: no level/causality fields; trailing whitespace
        in the raw message is trimmed."""
        self.assert_parsing(
            {
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_interface_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | LogParameterInspector | 369 | EntityCounter | Enrich core transaction with Productizer enricher started. "
            },
            {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "instance_name": "LogParameterInspector",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "Enrich core transaction with Productizer enricher started."
            })

    def test_corecommit(self):
        """Core-commit topic: full six-field format; trailing whitespace
        in the raw message is trimmed."""
        self.assert_parsing(
            {
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_corecommit_log_err_v1",
                "message": "2017-06-03 03:45:27,624 | INFO | some_instance_name | 5465 | 369 | EntityCounter | Enrich core transaction with Productizer enricher started.            "
            },
            {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "instance_name": "some_instance_name",
                "causality_id": "5465",
                "thread_id": "369",
                "class_name": "EntityCounter",
                "message": "Enrich core transaction with Productizer enricher started."
            })

    def test_wcf(self):
        """WCF topic: multi-line payload is flattened into a single message
        (the per-line timestamp prefixes after the first are stripped)."""
        self.assert_parsing(
            {
                "topic": "vagrant_in_eosdtv_lab5aobo_tst_heapp_stagis_wcf_log_gen_v1",
                "message": "2017-06-03 03:45:27,624 | LogParameterInspector - ------------------------------------------------------\n2017-07-24 14:49:51,721 | LogParameterInspector - Incoming call (aa84fcd0-0a23-4aa0-b66e-366559148853): net.pipe://localhost/STAGIS_EE_Services/ChannelService/GetDisplayName\n2017-07-24 14:49:51,721 | LogParameterInspector - Arguments: \n2017-07-24 14:49:51,721 | LogParameterInspector - List, Items: 1, First Item: Language: NL, Kind: NULL, Name: NPO 1\n2017-07-24 14:49:51,722 | LogParameterInspector - ned1\n2017-07-24 14:49:51,722 | LogParameterInspector - True\", \"hostname\": \"test1\", \"reason\": \"Fields amount not equal values amount\"}"
            },
            {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "message": "LogParameterInspector - ------------------------------------------------------LogParameterInspector - Incoming call (aa84fcd0-0a23-4aa0-b66e-366559148853): net.pipe://localhost/STAGIS_EE_Services/ChannelService/GetDisplayNameLogParameterInspector - Arguments:LogParameterInspector - List, Items: 1, First Item: Language: NL, Kind: NULL, Name: NPO 1LogParameterInspector - ned1LogParameterInspector - True\", \"hostname\": \"test1\", \"reason\": \"Fields amount not equal values amount\"}"
            })
class ProdisMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for PRODIS log files (pipe- and bracket-delimited formats).

    NOTE(fix): the original used Python-2-only leading-zero integer literals
    (``02``, ``06``) in several ``datetime(...)`` calls, which is a
    SyntaxError under Python 3. They are rewritten as ``2``/``6`` — the
    values are unchanged.
    """

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_prodis_ws_5(self):
        """PRODIS_WS.log: five pipe-delimited fields are extracted."""
        self.assert_parsing(
            {
                "source": "PRODIS_WS.log",
                "message": "2017-09-28 13:39:11,238 | DEBUG | Asset Propagation Thread | TSTV 3D Port | [Asset: 9a7b25dd-d5e7-4d9b-b91d-777002e11008] - Asset does not need repropagation to videoserver according to the adapter specific data"
            },
            {
                "@timestamp": datetime(2017, 9, 28, 13, 39, 11, 238000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread_name": "Asset Propagation Thread",
                "instance_name": "TSTV 3D Port",
                "message": "[Asset: 9a7b25dd-d5e7-4d9b-b91d-777002e11008] - Asset does not need repropagation to videoserver according to the adapter specific data"
            })

    def test_prodis_ws_error_5(self):
        """PRODIS_WS.Error.log: same five-field format at ERROR level."""
        self.assert_parsing(
            {
                "source": "PRODIS_WS.Error.log",
                "message": "2017-10-04 14:02:30,482 | ERROR | Asset Propagation Thread | TSTV 3D Port | [Asset: 84917e83-e618-4afa-aa91-4d5c374514c2] - Asset has failed with message 'Received unexpected reply during CreateOrUpdateContent, HttpStatusCode : NotFound'."
            },
            {
                "@timestamp": datetime(2017, 10, 4, 14, 2, 30, 482000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "thread_name": "Asset Propagation Thread",
                "instance_name": "TSTV 3D Port",
                "message": "[Asset: 84917e83-e618-4afa-aa91-4d5c374514c2] - Asset has failed with message 'Received unexpected reply during CreateOrUpdateContent, HttpStatusCode : NotFound'."
            })

    def test_prodis_ws_6(self):
        """PRODIS_WS.log with six fields: the extra one becomes 'component'."""
        self.assert_parsing(
            {
                "source": "PRODIS_WS.log",
                "message": "2017-06-03 03:45:27,624 | INFO | Asset Propagation Thread | TSTV 3D Port | TSTV Recording Scheduled | P=P;ChId=0062;ChName=NPO 1;AssetId=92148691-1cb3-44fd-a313-809ef36b0604;Title=Boeken;ResponseCode=RecordingInstructionSucceeded;Message='CreateOrUpdate' message for asset '92148691-1cb3-44fd-a313-809ef36b0604' has been sent to content server."
            },
            {
                "@timestamp": datetime(2017, 6, 3, 3, 45, 27, 624000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "thread_name": "Asset Propagation Thread",
                "instance_name": "TSTV 3D Port",
                "component": "TSTV Recording Scheduled",
                "message": "P=P;ChId=0062;ChName=NPO 1;AssetId=92148691-1cb3-44fd-a313-809ef36b0604;Title=Boeken;ResponseCode=RecordingInstructionSucceeded;Message='CreateOrUpdate' message for asset '92148691-1cb3-44fd-a313-809ef36b0604' has been sent to content server."
            })

    def test_prodis_general(self):
        """PRODIS.log: bracketed thread id format."""
        self.assert_parsing(
            {
                "source": "PRODIS.log",
                "message": "2017-10-03 16:26:06,782 INFO [1] - Registered PRODIS client '21' with information: be-w-p-obo00159, 169.254.207.71, 00:50:56:B2:40:6B, a_jlambregts, admin, 8040, 2.3 2017-July-05 #1 Release patch 2"
            },
            {
                "@timestamp": datetime(2017, 10, 3, 16, 26, 6, 782000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "thread": "1",
                "message": "Registered PRODIS client '21' with information: be-w-p-obo00159, 169.254.207.71, 00:50:56:B2:40:6B, a_jlambregts, admin, 8040, 2.3 2017-July-05 #1 Release patch 2"
            })

    def test_prodis_general_error(self):
        """PRODIS.Error.log: same bracketed format at WARN level."""
        self.assert_parsing(
            {
                "source": "PRODIS.Error.log",
                "message": "2017-09-20 14:35:38,140 WARN [1] - Catalog structure is not in sync with the database, the number of nodes differs between database '1937' and GUI '1936'."
            },
            {
                "@timestamp": datetime(2017, 9, 20, 14, 35, 38, 140000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "thread": "1",
                "message": "Catalog structure is not in sync with the database, the number of nodes differs between database '1937' and GUI '1936'."
            })

    def test_prodis_config(self):
        """PRODIS_Config.log: bracketed format."""
        self.assert_parsing(
            {
                "source": "PRODIS_Config.log",
                "message": "2017-09-28 15:02:30,667 INFO [1] - Access to the application granted to user admin"
            },
            {
                # was datetime(2017, 9, 28, 15, 02, 30, ...) — py2-only literal
                "@timestamp": datetime(2017, 9, 28, 15, 2, 30, 667000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "thread": "1",
                "message": "Access to the application granted to user admin"
            })

    def test_prodis_rest_client(self):
        """ProdisRestClients.log: '(:0) -' prefix is stripped from the message."""
        self.assert_parsing(
            {
                "source": "ProdisRestClients.log",
                "message": "2017-10-04 11:55:23,224 DEBUG [Asset Propagation Thread] (:0) - <IngestEndTime>2017-10-04T15:00:00Z</IngestEndTime>"
            },
            {
                "@timestamp": datetime(2017, 10, 4, 11, 55, 23, 224000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread": "Asset Propagation Thread",
                "message": "<IngestEndTime>2017-10-04T15:00:00Z</IngestEndTime>"
            })

    def test_prodis_rest_service(self):
        """ProdisRestServices.log: same stripped '(:0) -' format."""
        self.assert_parsing(
            {
                "source": "ProdisRestServices.log",
                "message": """2017-10-04 14:06:44,093 DEBUG [23] (:0) - <No incoming message>"""
            },
            {
                # was datetime(2017, 10, 4, 14, 06, 44, ...) — py2-only literal
                "@timestamp": datetime(2017, 10, 4, 14, 6, 44, 93000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread": "23",
                "message": "<No incoming message>"
            })

    def test_prodis_reporting_rest_service(self):
        """ProdisReportingRestServices.log: same stripped '(:0) -' format."""
        self.assert_parsing(
            {
                "source": "ProdisReportingRestServices.log",
                "message": """2017-10-04 14:06:44,093 DEBUG [23] (:0) - <No incoming message>"""
            },
            {
                # was datetime(2017, 10, 4, 14, 06, 44, ...) — py2-only literal
                "@timestamp": datetime(2017, 10, 4, 14, 6, 44, 93000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "DEBUG",
                "thread": "23",
                "message": "<No incoming message>"
            })
def __init__(self, **kwargs):
    """Load base.ini and remember the configured window dimensions."""
    super(PiHome, self).__init__(**kwargs)
    config = Configuration('base.ini')
    self.base_config = config
    # Defaults (480x800) apply when the [window] settings are absent.
    self.height = config.get_int('window', 'height', 480)
    self.width = config.get_int('window', 'width', 800)
"""Entry point that configures logging and runs the Integrator."""

import sys  # FIX: was missing although sys.path is used below (NameError).
import os
import os.path

# Make the sibling ``integrator`` directory importable before loading from it.
sys.path.insert(
    0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../integrator')))

from util.configuration import Configuration
from integrat import Integrator

# Create the logger
import logging
log = logging.getLogger(__name__)

if __name__ == '__main__':
    # Load the configuration file
    config = Configuration('config.ini')

    # Configure the logger: DEBUG when the config asks for verbosity,
    # mirrored to the console and to integrator.log (truncated each run).
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.DEBUG if config.verbose() else logging.INFO)
    logFormat = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
    ch = logging.StreamHandler()
    ch.setFormatter(logging.Formatter(logFormat))
    root_logger.addHandler(ch)
    fh = logging.FileHandler('integrator.log', mode='w')
    fh.setFormatter(logging.Formatter(logFormat))
    root_logger.addHandler(fh)

    # Create an instance of the integrator
    integrator = Integrator(config)
from util.configuration import Configuration
from modules.reporting.stats import StatsGenerator

import logging
log = logging.getLogger(__name__)

if __name__ == '__main__':
    # Console-only logging at INFO level.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    root = logging.getLogger('')
    root.setLevel(logging.INFO)
    root.addHandler(stream_handler)

    # Load the configuration file.
    config = Configuration(
        '/home/cgueret/Code/CEDAR/DataDump-mini-vt/config.ini')

    # Build the statistics generator over the raw-data, rules and release
    # graphs; the trailing True enables the cache to speed up testing.
    generator = StatsGenerator(config.get_SPARQL(),
                               config.get_graph_name('raw-data'),
                               config.get_graph_name('rules'),
                               config.get_graph_name('release'),
                               True)

    # Go !
    generator.go('/tmp/stats.html')
class VROPSParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for vROps metric lines (influx-style line protocol:
    group,tag=value,... metric=value,... timestamp)."""

    # Event creators configured for the Europe/Amsterdam timezone.
    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_vrops_single_metric(self):
        """A line with a single metric yields a one-entry metrics dict."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "disk,name=lg-l-s-uxp00012,kind=VirtualMachine usage_average=6.5333333015441895 1517835494431\n"
            },
            {
                "group": "disk",
                "name": "lg-l-s-uxp00012",
                "res_kind": "VirtualMachine",
                "metrics": {
                    "usage_average": 6.5333333015441895
                },
                "timestamp": "1517835494431"
            })

    def test_vrops_multiple_metric(self):
        """Several comma-separated metrics all land in the metrics dict."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "summary,name=TN_DEU_PROD_CF_APP_CF_OBO_01_EPG_CF_ACS_DB_01,kind=DistributedVirtualPortgroup used_num_ports=4.0,max_num_ports=8.0,ports_down_pct=0.0 1517835494431\n"
            },
            {
                "group": "summary",
                "name": "TN_DEU_PROD_CF_APP_CF_OBO_01_EPG_CF_ACS_DB_01",
                "res_kind": "DistributedVirtualPortgroup",
                "metrics": {
                    "used_num_ports": 4.0,
                    "max_num_ports": 8.0,
                    "ports_down_pct": 0.0
                },
                "timestamp": "1517835494431"
            })

    def test_vrops_with_gap(self):
        """Backslash-escaped spaces inside tag values are kept in the name."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "resourcelimit,name=vRealize\ Operations\ Manager\ Remote\ Collector-NLCSAPVROPS005C,kind=vC-Ops-Remote-Collector numprocessesmax=192100.0,openfiles=100000.0,numprocesses=192100.0,openfilesmax=100000.0 1517835610321\n"
            },
            {
                "group": "resourcelimit",
                "name": "vRealize\\ Operations\\ Manager\\ Remote\\ Collector-NLCSAPVROPS005C",
                "res_kind": "vC-Ops-Remote-Collector",
                "metrics": {
                    "numprocessesmax": 192100.0,
                    "openfiles": 100000.0,
                    "numprocesses": 192100.0,
                    "openfilesmax": 100000.0
                },
                "timestamp": "1517835610321"
            })

    def test_vrops_random(self):
        """A typical host-system cpu line."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "cpu,name=nlcsapesxp010.csa.internal,kind=HostSystem idle_summation=9855.2001953125,used_summation=117.13333129882812,usage_average=0.5806666612625122 1518112552376\n"
            },
            {
                "group": "cpu",
                "name": "nlcsapesxp010.csa.internal",
                "res_kind": "HostSystem",
                "metrics": {
                    "idle_summation": 9855.2001953125,
                    "used_summation": 117.13333129882812,
                    "usage_average": 0.5806666612625122
                },
                "timestamp": "1518112552376"
            })

    def test_rare_case(self):
        """A virtual-machine net line."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "net,name=LG-W-P-VDI10028,kind=VirtualMachine droppedpct=40.50104522705078,packetstxpersec=0.3333333432674408,packetsrxpersec=1.5666667222976685 1518115192381\n"
            },
            {
                "group": "net",
                "name": "LG-W-P-VDI10028",
                "res_kind": "VirtualMachine",
                "metrics": {
                    "droppedpct": 40.50104522705078,
                    "packetstxpersec": 0.3333333432674408,
                    "packetsrxpersec": 1.5666667222976685
                },
                "timestamp": "1518115192381"
            })

    def test_rare_case2(self):
        """A line whose tag key is literally 'res_kind' and whose values
        contain escaped spaces."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "availability,name=Likewise\ Service\ Manager,res_kind=vSphere\ SSO\ Likewise\ Service\ Manager resourceavailability=100.0 1518118140000\n"
            },
            {
                "group": "availability",
                "name": "Likewise\ Service\ Manager",
                "res_kind": "vSphere\ SSO\ Likewise\ Service\ Manager",
                "metrics": {
                    "resourceavailability": 100.0
                },
                "timestamp": "1518118140000"
            })

    def test_extra_metric(self):
        """Extra tags (identifier, uuid, adapter) are ignored; all metrics
        are still collected."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "net,identifier=vmnic0,uuid=4f671cc2-06a4-4a24-806c-8fc329feb74a,name=lg-l-p-uxp00007,adapter=VMWARE,kind=VirtualMachine usage_average=0.0,demand=0.0,bytesrx_average=0.0,packetstxpersec=0.0,transmitted_workload=0.0,transmitted_average=0.0,maxobserved_kbps=25600.0,maxobserved_rx_kbps=12800.0,received_average=0.0,maxobserved_tx_kbps=12800.0,packetsrxpersec=0.0,received_workload=0.0,usage_workload=0.0,bytestx_average=0.0 1522077287436\n"
            },
            {
                "group": "net",
                "name": "lg-l-p-uxp00007",
                "res_kind": "VirtualMachine",
                "metrics": {
                    "usage_average": 0.0,
                    "demand": 0.0,
                    "bytesrx_average": 0.0,
                    "packetstxpersec": 0.0,
                    "transmitted_workload": 0.0,
                    "transmitted_average": 0.0,
                    "maxobserved_kbps": 25600.0,
                    "maxobserved_rx_kbps": 12800.0,
                    "received_average": 0.0,
                    "maxobserved_tx_kbps": 12800.0,
                    "packetsrxpersec": 0.0,
                    "received_workload": 0.0,
                    "usage_workload": 0.0,
                    "bytestx_average": 0.0
                },
                "timestamp": "1522077287436"
            })

    def test_extra_metric2(self):
        """A guest-filesystem line with uuid/adapter tags."""
        self.assert_parsing(
            {
                "source": "VROPS.log",
                "message": "guestfilesystem,uuid=7daf42e4-3ccf-4126-a3cd-cf5a3f2f053d,name=lg-l-p-obo00533,adapter=VMWARE,kind=VirtualMachine freespace_total=74.46806335449219,percentage_total=5.159259796142578,capacity_total=78.5190658569336,usage_total=4.051002502441406 1522091397392\n"
            },
            {
                "group": "guestfilesystem",
                "name": "lg-l-p-obo00533",
                "res_kind": "VirtualMachine",
                "metrics": {
                    "freespace_total": 74.46806335449219,
                    "percentage_total": 5.159259796142578,
                    "capacity_total": 78.5190658569336,
                    "usage_total": 4.051002502441406
                },
                "timestamp": "1522091397392"
            })
class AirflowLogParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for Airflow worker / scheduler / webui log lines.

    NOTE(review): several datetime() arguments were originally written with
    leading zeros (06, 03, 05).  Those are octal literals in Python 2 and a
    SyntaxError in Python 3; they are normalized to plain decimal integers
    below (identical values, forward-compatible syntax).
    """

    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_airflow_dag_execution_without_subtask(self):
        """dag/task are extracted from the log file path; no subtask fields."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/usr/local/airflow/logs/bbc_lookup_programmes_workflow/lookup_and_update_programmes/2017-10-10.333",
                "message": "[2017-10-27 09:55:24,555] {base_task_runner.py:113} INFO - Running: ['bash', '-c', u'airflow run bbc_lookup_programmes_workflow lookup_and_update_programmes 2017-11-27T06:55:09 --job_id 546290 --queue bbc --raw -sd DAGS_FOLDER/bbc_lookup_programmes_workflow.py']"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "dag": "bbc_lookup_programmes_workflow",
                "task": "lookup_and_update_programmes",
                "level": "INFO",
                "message": "Running: ['bash', '-c', u'airflow run bbc_lookup_programmes_workflow lookup_and_update_programmes 2017-11-27T06:55:09 --job_id 546290 --queue bbc --raw -sd DAGS_FOLDER/bbc_lookup_programmes_workflow.py']",
                "script": "base_task_runner.py:113"
            })

    def test_airflow_dag_execution_with_subtask(self):
        """The nested 'Subtask:' message yields subtask_* fields."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/usr/local/airflow/logs/be_create_obo_assets_transcoding_driven_trigger/lookup_dir/2017-10-10.333",
                "message": "[2017-06-09 09:03:03,399] {__init__.py:36} INFO - Subtask: [2017-06-09 09:03:05,555] {base_hook.py:67} INFO - Using connection to: media-syndication.api.bbci.co.uk"
            },
            {
                "@timestamp": datetime(2017, 6, 9, 9, 3, 3, 399000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "dag": "be_create_obo_assets_transcoding_driven_trigger",
                "task": "lookup_dir",
                "level": "INFO",
                "message": "Subtask: [2017-06-09 09:03:05,555] {base_hook.py:67} INFO - Using connection to: media-syndication.api.bbci.co.uk",
                "script": "__init__.py:36",
                "subtask_timestamp": datetime(2017, 6, 9, 9, 3, 5, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "base_hook.py",
                "subtask_level": "INFO",
                "subtask_message": "Using connection to: media-syndication.api.bbci.co.uk"
            })

    def test_airflow_dag_execution_with_subtask_and_crid(self):
        """A '~~'-escaped crid in the subtask message is decoded into 'crid'."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/usr/local/airflow/logs/be_create_obo_assets_transcoding_driven_trigger/lookup_dir/2017-10-10.333",
                "message": "[2017-06-09 09:03:03,399] {__init__.py:36} INFO - Subtask: [2017-06-09 09:05:08,555] {create_obo_assets_transcoded_workflow.py:224} INFO - Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979"
            },
            {
                "@timestamp": datetime(2017, 6, 9, 9, 3, 3, 399000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "dag": "be_create_obo_assets_transcoding_driven_trigger",
                "task": "lookup_dir",
                "level": "INFO",
                "message": "Subtask: [2017-06-09 09:05:08,555] {create_obo_assets_transcoded_workflow.py:224} INFO - Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979",
                "script": "__init__.py:36",
                "subtask_timestamp": datetime(2017, 6, 9, 9, 5, 8, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "create_obo_assets_transcoded_workflow.py",
                "subtask_level": "INFO",
                "subtask_message": "Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979",
                "crid": "crid://og.libertyglobal.com/MTV/PAID0000000001432979"
            })

    def test_airflow_dag_execution_with_subtask_and_airflow_id(self):
        """A 'Submitting asset:' subtask message yields 'airflow_id'."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/usr/local/airflow/logs/create_obo_assets_transcoded_workflow/ingest_to_fabrix/2017-10-10.333",
                "message": "[2017-10-27 09:55:24,555] {base_task_runner.py:113} INFO - Subtask: [2017-10-27 09:55:25,555] {create_obo_assets_transcoded_workflow.py:217} INFO - Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "dag": "create_obo_assets_transcoded_workflow",
                "task": "ingest_to_fabrix",
                "level": "INFO",
                "message": "Subtask: [2017-10-27 09:55:25,555] {create_obo_assets_transcoded_workflow.py:217} INFO - Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018",
                "script": "base_task_runner.py:113",
                "subtask_timestamp": datetime(2017, 10, 27, 9, 55, 25, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "create_obo_assets_transcoded_workflow.py",
                "subtask_level": "INFO",
                "subtask_message": "Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018",
                "airflow_id": "92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018"
            })

    def test_airflow_worker(self):
        """A generic worker log path yields no dag/task fields."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/var/logs/airflow.log",
                "message": "[2017-06-09 06:10:36,556] {__init__.py:36} INFO - Using executor CeleryExecutor"
            },
            {
                "@timestamp": datetime(2017, 6, 9, 6, 10, 36, 556000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "script": "__init__.py:36",
                "message": "Using executor CeleryExecutor"
            })

    def test_airflow_without_subtask(self):
        """Same message as the dag-execution case but from a non-dag path."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/var/logs/airflow.log",
                "message": "[2017-10-27 09:55:24,555] {base_task_runner.py:113} INFO - Running: ['bash', '-c', u'airflow run bbc_lookup_programmes_workflow lookup_and_update_programmes 2017-11-27T06:55:09 --job_id 546290 --queue bbc --raw -sd DAGS_FOLDER/bbc_lookup_programmes_workflow.py']"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "Running: ['bash', '-c', u'airflow run bbc_lookup_programmes_workflow lookup_and_update_programmes 2017-11-27T06:55:09 --job_id 546290 --queue bbc --raw -sd DAGS_FOLDER/bbc_lookup_programmes_workflow.py']",
                "script": "base_task_runner.py:113"
            })

    def test_airflow_with_subtask(self):
        """Subtask fields are extracted even without dag/task in the path."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/var/logs/airflow.log",
                "message": "[2017-06-09 09:03:03,399] {__init__.py:36} INFO - Subtask: [2017-06-09 09:03:05,555] {base_hook.py:67} INFO - Using connection to: media-syndication.api.bbci.co.uk"
            },
            {
                "@timestamp": datetime(2017, 6, 9, 9, 3, 3, 399000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "Subtask: [2017-06-09 09:03:05,555] {base_hook.py:67} INFO - Using connection to: media-syndication.api.bbci.co.uk",
                "script": "__init__.py:36",
                "subtask_timestamp": datetime(2017, 6, 9, 9, 3, 5, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "base_hook.py",
                "subtask_level": "INFO",
                "subtask_message": "Using connection to: media-syndication.api.bbci.co.uk"
            })

    def test_airflow_with_subtask_and_crid(self):
        """crid decoding works for the generic worker path too."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/var/logs/airflow.log",
                "message": "[2017-06-09 09:03:03,399] {__init__.py:36} INFO - Subtask: [2017-06-09 09:05:08,555] {create_obo_assets_transcoded_workflow.py:224} INFO - Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979"
            },
            {
                "@timestamp": datetime(2017, 6, 9, 9, 3, 3, 399000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "Subtask: [2017-06-09 09:05:08,555] {create_obo_assets_transcoded_workflow.py:224} INFO - Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979",
                "script": "__init__.py:36",
                "subtask_timestamp": datetime(2017, 6, 9, 9, 5, 8, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "create_obo_assets_transcoded_workflow.py",
                "subtask_level": "INFO",
                "subtask_message": "Fabrix input: /obo_manage/Countries/UK/FromAirflow/crid~~3A~~2F~~2Fog.libertyglobal.com~~2FMTV~~2FPAID0000000001432979",
                "crid": "crid://og.libertyglobal.com/MTV/PAID0000000001432979"
            })

    def test_airflow_with_subtask_and_airflow_id(self):
        """airflow_id extraction works for the generic worker path too."""
        self.assert_parsing(
            {
                "topic": "airflow_worker",
                "source": "/var/logs/airflow.log",
                "message": "[2017-10-27 09:55:24,555] {base_task_runner.py:113} INFO - Subtask: [2017-10-27 09:55:25,555] {create_obo_assets_transcoded_workflow.py:217} INFO - Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "Subtask: [2017-10-27 09:55:25,555] {create_obo_assets_transcoded_workflow.py:217} INFO - Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018",
                "script": "base_task_runner.py:113",
                "subtask_timestamp": datetime(2017, 10, 27, 9, 55, 25, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "subtask_script": "create_obo_assets_transcoded_workflow.py",
                "subtask_level": "INFO",
                "subtask_message": "Submitting asset: 92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018",
                "airflow_id": "92bf0465527a60db913f3490e5ce905b_3371E5144AD4597D56709497CB31A018"
            })

    def test_manager_scheduler_latest_with_dags(self):
        """Scheduler 'DAG(s) ... retrieved' line yields dag + tenant."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {jobs.py:1537} DagFileProcessor72328 INFO - DAG(s) ['be_create_obo_thumbnails_workflow'] retrieved from /usr/local/airflow/dags/be_create_obo_thumbnails_workflow.py"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor72328",
                "message": "DAG(s) ['be_create_obo_thumbnails_workflow'] retrieved from /usr/local/airflow/dags/be_create_obo_thumbnails_workflow.py",
                "script": "jobs.py",
                "tenant": "be",
                "script_line": "1537",
                "dag": "be_create_obo_thumbnails_workflow"
            })

    def test_manager_scheduler_latest_with_dag_run(self):
        """'Updating state for <DagRun ...>' yields the dag name."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor72223 INFO - Updating state for <DagRun be_create_obo_assets_transcoding_driven_workflow @ 2018-03-06 15:24:17.806572: be-crid~~3A~~2F~~2Ftelenet.be~~2F8ebcb1e0-8295-40b4-b5ee-fa6c0dd329a6-2018-03-06T15:20:50.800499, externally triggered: True> considering 20 task(s)"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor72223",
                "message": "Updating state for <DagRun be_create_obo_assets_transcoding_driven_workflow @ 2018-03-06 15:24:17.806572: be-crid~~3A~~2F~~2Ftelenet.be~~2F8ebcb1e0-8295-40b4-b5ee-fa6c0dd329a6-2018-03-06T15:20:50.800499, externally triggered: True> considering 20 task(s)",
                "script": "models.py",
                "tenant": "be",
                "dag": "be_create_obo_assets_transcoding_driven_workflow",
                "script_line": "4204"
            })

    def test_manager_scheduler_latest_with_dag(self):
        """'<DAG: ...>' reference yields the dag name and tenant."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor72223 INFO - Skipping SLA check for <DAG: be_create_obo_assets_transcoding_driven_trigger> because no tasks in DAG have SLAs"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor72223",
                "message": "Skipping SLA check for <DAG: be_create_obo_assets_transcoding_driven_trigger> because no tasks in DAG have SLAs",
                "script": "models.py",
                "tenant": "be",
                "dag": "be_create_obo_assets_transcoding_driven_trigger",
                "script_line": "4204"
            })

    def test_manager_scheduler_latest_without_dag(self):
        """A scheduler line without any dag reference yields no dag/tenant."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor72223 INFO - Finding 'running' jobs without a recent heartbeat"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor72223",
                "message": "Finding 'running' jobs without a recent heartbeat",
                "script": "models.py",
                "script_line": "4204"
            })

    def test_manager_scheduler_airflow(self):
        """scheduler_airflow topic: script/script_line split, no processor."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_airflow",
                "message": "[2017-10-27 09:55:24,555] {jobs.py:1195} INFO - Executor reports be_create_obo_assets_transcoding_driven_workflow.register_on_license_server execution_date=2018-04-13 09:20:53.573308 as success"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "message": "Executor reports be_create_obo_assets_transcoding_driven_workflow.register_on_license_server execution_date=2018-04-13 09:20:53.573308 as success",
                "script": "jobs.py",
                "script_line": "1195"
            })

    def test_airflow_manager_webui_without_script(self):
        """gunicorn-style webui line (UTC offset in brackets, no script)."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_webui",
                "source": "any.log",
                "message": "[2018-04-12 10:12:16 +0000] [16262] [INFO] Booting worker with pid: 16262"
            },
            {
                "@timestamp": datetime(2018, 4, 12, 10, 12, 16, 0).replace(tzinfo=pytz.utc),
                "level": "INFO",
                "thread_id": "16262",
                "message": "Booting worker with pid: 16262"
            })

    def test_airflow_manager_webui_with_script(self):
        """webui line that includes a {script:line} reference."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_webui",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] [16262] {models.py:168} INFO - Filling up the DagBag from /usr/local/airflow/dags"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "thread_id": "16262",
                "message": "Filling up the DagBag from /usr/local/airflow/dags",
                "script": "models.py",
                "script_line": "168"
            })

    def test_airflow_manager_webui_with_ip(self):
        """Access-log style webui line: the leading client IP is split off."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_webui",
                "source": "any.log",
                "message": '''172.31.139.17 - - [16/Apr/2018:15:18:27 +0000] "GET /admin/airflow/task?execution_date=2018-04-13T14%3A33%3A05.290779&dag_id=de_create_obo_assets_workflow&task_id=failure_detector HTTP/1.1" 200 36528 "http://webserver1.airflow-prod-a.horizongo.eu/admin/taskinstance/?flt0_dag_id_contains=de_create_obo_assets_workflow&flt1_state_contains=failed&flt4_execution_date_between=2018-04-13+00%3A00%3A00+to+2018-04-13+23%3A59%3A59" "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"''',
            },
            {
                "@timestamp": datetime(2018, 4, 16, 15, 18, 27).replace(tzinfo=pytz.utc),
                "message": '''"GET /admin/airflow/task?execution_date=2018-04-13T14%3A33%3A05.290779&dag_id=de_create_obo_assets_workflow&task_id=failure_detector HTTP/1.1" 200 36528 "http://webserver1.airflow-prod-a.horizongo.eu/admin/taskinstance/?flt0_dag_id_contains=de_create_obo_assets_workflow&flt1_state_contains=failed&flt4_execution_date_between=2018-04-13+00%3A00%3A00+to+2018-04-13+23%3A59%3A59" "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"''',
                "ip": "172.31.139.17"
            })

    def test_airflow_manager_scheduler_latest_dag_status_failed(self):
        """'marking run ... failed' yields action=RUN, status=FAILURE."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor7191 INFO - Deadlock; marking run <DagRun be_move_obo_linear_cycle @ 2018-04-19 03:08:00: scheduled__2018-04-19T03:08:00, externally triggered: False> failed"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor7191",
                "message": "Deadlock; marking run <DagRun be_move_obo_linear_cycle @ 2018-04-19 03:08:00: scheduled__2018-04-19T03:08:00, externally triggered: False> failed",
                "script": "models.py",
                "action": "RUN",
                "dag": "be_move_obo_linear_cycle",
                "status": "FAILURE",
                "tenant": "be",
                "script_line": "4204"
            })

    def test_airflow_manager_scheduler_latest_dag_status_success(self):
        """'Marking run ... successful' yields action=RUN, status=SUCCESS."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor7191 INFO - Marking run <DagRun be_create_obo_assets_transcoding_driven_trigger @ 2018-04-19 07:10:00: scheduled__2018-04-19T07:10:00, externally triggered: False> successful"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor7191",
                "message": "Marking run <DagRun be_create_obo_assets_transcoding_driven_trigger @ 2018-04-19 07:10:00: scheduled__2018-04-19T07:10:00, externally triggered: False> successful",
                "script": "models.py",
                "dag": "be_create_obo_assets_transcoding_driven_trigger",
                "status": "SUCCESS",
                "action": "RUN",
                "tenant": "be",
                "script_line": "4204"
            })

    def test_airflow_manager_scheduler_latest_with_no_dag_no_status(self):
        """An unknown entity tag (<NOT_RUN_DUG ...>) yields no dag/status."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor7191 INFO - Marking run <NOT_RUN_DUG be_create_obo_assets_transcoding_driven_trigger @ 2018-04-19 07:10:00: scheduled__2018-04-19T07:10:00, externally triggered: False> successful"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag_processor": "DagFileProcessor7191",
                "message": "Marking run <NOT_RUN_DUG be_create_obo_assets_transcoding_driven_trigger @ 2018-04-19 07:10:00: scheduled__2018-04-19T07:10:00, externally triggered: False> successful",
                "script": "models.py",
                "script_line": "4204"
            })

    def test_airflow_manager_scheduler_latest_with_with_action_created(self):
        """'Created <DagRun ...>' yields action=CREATE and the dag name."""
        self.assert_parsing(
            {
                "topic": "airflowmanager_scheduler_latest",
                "source": "any.log",
                "message": "[2017-10-27 09:55:24,555] {models.py:4204} DagFileProcessor7191 INFO - Created <DagRun be_move_obo_linear_cycle @ 2018-04-26 13:04:00: scheduled__2018-04-26T13:04:00, externally triggered: False>"
            },
            {
                "@timestamp": datetime(2017, 10, 27, 9, 55, 24, 555000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "dag": "be_move_obo_linear_cycle",
                "dag_processor": "DagFileProcessor7191",
                "message": "Created <DagRun be_move_obo_linear_cycle @ 2018-04-26 13:04:00: scheduled__2018-04-26T13:04:00, externally triggered: False>",
                "script": "models.py",
                "action": "CREATE",
                "script_line": "4204",
                "tenant": "be"
            })
def run(self):
    """Synchronize notes and previews for every configured account.

    Executed on a worker thread (logs the current QThread id).  For each
    account a DriveClient is built from the stored credentials/URL parts,
    the application-data folder and the OXNote home folder are synchronized,
    and progress signals are emitted.  A failure for one account is logged
    and does not stop synchronization of the remaining accounts.
    """
    logger.debug('Starting synchronization threads {}'.format(
        int(QThread.currentThreadId())))
    drive_client = None
    # NOTE(review): session_id is never assigned anywhere in this method, so
    # the logout branch in the finally block is currently dead code — confirm
    # whether a session id should be captured from DriveClient.
    session_id = None
    account_manager = AccountManager()
    for account_id in account_manager.list_accounts():
        account = account_manager.get_account(account_id)
        try:
            drive_client = DriveClient(
                username=account.username,
                password=account.password,
                url='{}://{}:{}{}'.format(account.url_scheme,
                                          account.url_host,
                                          str(account.url_port),
                                          account.url_uri),
                local_synchronization_directory=account.
                get_account_root_directory_path(),
                synchronization_statefile=account.
                get_synchronization_statefile(),
                compatible_api_version=Configuration().get_setting(
                    'drive_client', 'api.compatibility.api_version'),
                api_client_identifier=Configuration().get_setting(
                    'drive_client',
                    'api.defaults.compatibility.client_identifier',
                    default='OXNote'),
                # Per-account callback so note-update signals carry the id.
                notification_callback=partial(
                    self.signal_synchronization_note_updated.emit,
                    account_id))
            self.signal_synchronization_status_update.emit(
                'Synchronizing notes for account {}'.format(
                    account.username))
            drive_client.synchronize_files(account.application_data_folder)
            self.signal_synchronization_status_update.emit(
                'Synchronizing previews for account {}'.format(
                    account.username))
            drive_client.synchronize_files(account.oxnote_home_folder)
            drive_client.close()
        except ConnectionError as e:
            # Transient network problem: warn and continue with next account.
            logger.warning(e)
        except (SessionException, RequestException, LocationValueError,
                PermissionException) as e:
            logger.error(e)
        except Exception as e:
            # Unexpected failure: keep the traceback for diagnosis.
            logger.error(e, exc_info=True)
        finally:
            if drive_client:
                del drive_client
            if session_id:
                try:
                    HttpApiSession().logout(session_id)
                # Narrowed from a bare "except:" so SystemExit and
                # KeyboardInterrupt are not swallowed; logout is best-effort.
                except Exception:
                    logger.error(
                        'Unable to terminate session {}'.format(session_id))
    self.signal_restart_timer.emit()
    self.signal_synchronization_finished.emit()
class NokiaVrmMessageParsingTestCase(BaseMultipleMessageParsingTestCase):
    """Parsing tests for Nokia VRM audit log lines (pipe-separated fields).

    NOTE(review): one datetime() argument was written as 01 (an octal
    literal in Python 2, a SyntaxError in Python 3) and is normalized to 1
    below, and the vspp adapter test lacked the "test_" prefix so unittest
    discovery never ran it — it is renamed with an alias kept for backward
    compatibility.
    """

    event_creators = create_event_creators(
        Configuration(dict={"timezone": {
            "name": "Europe/Amsterdam"
        }}))

    def test_scheduler_bs_audit(self):
        """Scheduler BS audit line with an empty second field (field1)."""
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-scheduler-BS/logs/scheduler_bs_audit.log",
                "message": "26-Feb-2018 17:41:06.356 | INFO | | a238c94a-0a16-46f8-ba53-c207444bd8d2 | INTERNAL | 172.23.41.73 | [[GET] /scheduler/web/Record/addByProgram] | [{schema=1.0, eventId=crid:~~2F~~2Ftelenet.be~~2F7eda8354-982e-4951-ba32-7aa1219d7004,imi:00100000001C566C, userId=subscriber_000556_be}] | Entity not found (info) | Not found Event entity with programId field equals to crid:~~2F~~2Ftelenet.be~~2F7eda8354-982e-4951-ba32-7aa1219d7004,imi:00100000001C566C"
            },
            {
                "@timestamp": datetime(2018, 2, 26, 17, 41, 6, 356000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "field1": "",
                "level": "INFO",
                "event_id": "a238c94a-0a16-46f8-ba53-c207444bd8d2",
                "domain": "INTERNAL",
                "ip": "172.23.41.73",
                "method": "[[GET] /scheduler/web/Record/addByProgram]",
                "params": "[{schema=1.0, eventId=crid:~~2F~~2Ftelenet.be~~2F7eda8354-982e-4951-ba32-7aa1219d7004,imi:00100000001C566C, userId=subscriber_000556_be}]",
                "description": "Entity not found (info)",
                "message": "Not found Event entity with programId field equals to crid:~~2F~~2Ftelenet.be~~2F7eda8354-982e-4951-ba32-7aa1219d7004,imi:00100000001C566C"
            }
        )

    def test_console_bs_audit(self):
        """Console BS audit line with an empty IP field."""
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-dvr-console-BS/logs/console_bs_audit.log",
                "message": "20-Sep-2017 15:53:00.010 | ERROR | 41cd2f05-5f44-4cc9-97cc-5cb9007b27bf | INTERNAL | | [[GET] /dvr-console/data/sysadmin/status] | [{entriesPageSize=1000, sortDirection=asc, entriesStartIndex=0, sortField=instanceName, count=true}] | Request to service ended with communication error | NoRouteToHostException invoking request GET http://10.95.97.91:8088/auth. Exception message: No route to host (Host unreachable)"
            },
            {
                "@timestamp": datetime(2017, 9, 20, 15, 53, 0, 10000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "ip": "",
                "event_id": "41cd2f05-5f44-4cc9-97cc-5cb9007b27bf",
                "domain": "INTERNAL",
                "method": "[[GET] /dvr-console/data/sysadmin/status]",
                "params": "[{entriesPageSize=1000, sortDirection=asc, entriesStartIndex=0, sortField=instanceName, count=true}]",
                "description": "Request to service ended with communication error",
                "message": "NoRouteToHostException invoking request GET http://10.95.97.91:8088/auth. Exception message: No route to host (Host unreachable)"
            }
        )

    def test_authentication_bs_audit(self):
        """Authentication BS audit line (CONTROL domain)."""
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-dvr-console-BS/logs/authentication_bs_audit.log",
                "message": "27-Nov-2017 11:11:14.331 | WARN | 1b5e781d-3c79-4ecc-b369-ee2bd25b6fe9 | CONTROL | 10.95.97.61 | [[GET] /auth/control/checkAlive] | [{depth=deep}] | Service checkAlive request with response false | user DS is not alive"
            },
            {
                "@timestamp": datetime(2017, 11, 27, 11, 11, 14, 331000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "WARN",
                "event_id": "1b5e781d-3c79-4ecc-b369-ee2bd25b6fe9",
                "domain": "CONTROL",
                "ip": "10.95.97.61",
                "method": "[[GET] /auth/control/checkAlive]",
                "params": "[{depth=deep}]",
                "description": "Service checkAlive request with response false",
                "message": "user DS is not alive"
            }
        )

    def test_cdvr_bs_audit(self):
        """cDVR BS audit line with ip field 'local' and a job method."""
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-dvr-console-BS/logs/cdvr_bs_audit.log",
                "message": "17-Jan-2018 11:30:30.035 | ERROR | f37d72f0-d11d-4adf-9144-02db7d109397 | INTERNAL | local | [Job execute] | [{}] | Request to service returns unexpected HTTP response status code | Unexpected HTTP response status code 404 invoking request GET http://10.16.174.129:5929"
            },
            {
                "@timestamp": datetime(2018, 1, 17, 11, 30, 30, 35000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "ERROR",
                "event_id": "f37d72f0-d11d-4adf-9144-02db7d109397",
                "domain": "INTERNAL",
                "ip": "local",
                "method": "[Job execute]",
                "params": "[{}]",
                "description": "Request to service returns unexpected HTTP response status code",
                "message": "Unexpected HTTP response status code 404 invoking request GET http://10.16.174.129:5929"
            }
        )

    def test_epg_ds_audit(self):
        """EPG audit line routed via the DS topic yields two event ids."""
        self.assert_parsing({
            "topic": "nokiavrmds_epgaudit",
            "source": "/opt/vrm/jetty-dvr-console-BS/logs/epg_audit.log",
            "message": "29-Nov-2017 19:30:01.480 | ERROR | 685767e5-7da7-4b15-bc90-c2dae3d129e6 | 640572db-5331-48c3-a446-072ad80c02d1 | INTERNAL | 10.95.97.66 | [[PUT] /epg/data/Event] | [{schema=2.0, entries=true, query=.where(eq(e.internalId,49229),or(eq(e.locked,null()),eq(e.locked,false()))), count=true}] | Data access unrecoverable error | Unrecoverable error details: Duplicate entry '' for key 'alternateId'"
        }, {
            "@timestamp": datetime(2017, 11, 29, 19, 30, 1, 480000).replace(tzinfo=timezones["Europe/Amsterdam"]),
            "level": "ERROR",
            "event_id_1": "685767e5-7da7-4b15-bc90-c2dae3d129e6",
            "event_id_2": "640572db-5331-48c3-a446-072ad80c02d1",
            "domain": "INTERNAL",
            "ip": "10.95.97.66",
            "method": "[[PUT] /epg/data/Event]",
            "params": "[{schema=2.0, entries=true, query=.where(eq(e.internalId,49229),or(eq(e.locked,null()),eq(e.locked,false()))), count=true}]",
            "description": "Data access unrecoverable error",
            "message": "Unrecoverable error details: Duplicate entry '' for key 'alternateId'"
        })

    def test_epg_bs_audit(self):
        """Identical payload via the BS topic parses to the same fields."""
        self.assert_parsing({
            "topic": "nokiavrmbs_epgaudit",
            "source": "/opt/vrm/jetty-dvr-console-BS/logs/epg_audit.log",
            "message": "29-Nov-2017 19:30:01.480 | ERROR | 685767e5-7da7-4b15-bc90-c2dae3d129e6 | 640572db-5331-48c3-a446-072ad80c02d1 | INTERNAL | 10.95.97.66 | [[PUT] /epg/data/Event] | [{schema=2.0, entries=true, query=.where(eq(e.internalId,49229),or(eq(e.locked,null()),eq(e.locked,false()))), count=true}] | Data access unrecoverable error | Unrecoverable error details: Duplicate entry '' for key 'alternateId'"
        }, {
            "@timestamp": datetime(2017, 11, 29, 19, 30, 1, 480000).replace(tzinfo=timezones["Europe/Amsterdam"]),
            "level": "ERROR",
            "event_id_1": "685767e5-7da7-4b15-bc90-c2dae3d129e6",
            "event_id_2": "640572db-5331-48c3-a446-072ad80c02d1",
            "domain": "INTERNAL",
            "ip": "10.95.97.66",
            "method": "[[PUT] /epg/data/Event]",
            "params": "[{schema=2.0, entries=true, query=.where(eq(e.internalId,49229),or(eq(e.locked,null()),eq(e.locked,false()))), count=true}]",
            "description": "Data access unrecoverable error",
            "message": "Unrecoverable error details: Duplicate entry '' for key 'alternateId'"
        })

    def test_epg_audit_fails_with_non_existing_topic(self):
        """An unknown topic raises ParsingException at event creation."""
        row = {
            "topic": "invalid_topic",
            "source": "/opt/vrm/jetty-dvr-console-BS/logs/epg_audit.log",
            "message": "29-Nov-2017 19:30:01.480 | ERROR | 685767e5-7da7-4b15-bc90-c2dae3d129e6 | 640572db-5331-48c3-a446-072ad80c02d1 | INTERNAL | 10.95.97.66 | [[PUT] /epg/data/Event] | [{schema=2.0, entries=true, query=.where(eq(e.internalId,49229),or(eq(e.locked,null()),eq(e.locked,false()))), count=true}] | Data access unrecoverable error | Unrecoverable error details: Duplicate entry '' for key 'alternateId'"
        }
        with self.assertRaises(ParsingException):
            self.event_creators.get_parsing_context(row).event_creator.create(row)

    def test_cdvr_audit(self):
        """cDVR DS audit line with empty ip and two event ids."""
        self.assert_parsing({
            "source": "/opt/vrm/jetty-dvr-console-DS/logs/cDVR_audit.log",
            "message": "27-Nov-2017 03:30:24.653 | ERROR | 62f6bf96-e92c-4eea-bea0-4571e4dbc94a | dd69dbe2-0e53-4712-85ce-1f395a7c0732 | INTERNAL | | [[GET] /cDVR/data/Record] | [{schema=1.0, byUserId=Jef_be, fields=eventId,actualStartTime,year,episode,seasonNumber,source,seriesId,alreadyWatched,duration,seasonName,name,actualEndTime,pinProtected,startTime,id,endTime,programId,channelId,status}] | Data access unrecoverable error | Unrecoverable error details: Got error 4009 'Cluster Failure' from NDBCLUSTER. Exception message: java.sql.SQLException: Got error 4009 'Cluster Failure' from NDBCLUSTER"
        }, {
            "@timestamp": datetime(2017, 11, 27, 3, 30, 24, 653000).replace(tzinfo=timezones["Europe/Amsterdam"]),
            "level": "ERROR",
            "event_id_1": "62f6bf96-e92c-4eea-bea0-4571e4dbc94a",
            "event_id_2": "dd69dbe2-0e53-4712-85ce-1f395a7c0732",
            "domain": "INTERNAL",
            "ip": "",
            "method": "[[GET] /cDVR/data/Record]",
            "params": "[{schema=1.0, byUserId=Jef_be, fields=eventId,actualStartTime,year,episode,seasonNumber,source,seriesId,alreadyWatched,duration,seasonName,name,actualEndTime,pinProtected,startTime,id,endTime,programId,channelId,status}]",
            "description": "Data access unrecoverable error",
            "message": "Unrecoverable error details: Got error 4009 'Cluster Failure' from NDBCLUSTER. Exception message: java.sql.SQLException: Got error 4009 'Cluster Failure' from NDBCLUSTER"
        })

    def test_user_audit(self):
        """User DS audit line."""
        self.assert_parsing({
            "source": "/opt/vrm/jetty-dvr-console-DS/logs/user_audit.log",
            "message": "27-Nov-2017 10:35:01.718 | ERROR | d493b5da-13a4-4504-81af-484bc09329b7 | b381adc9-9d87-4dbf-8c26-b58bc92c24e9 | INTERNAL | 10.95.97.65 | [[GET] /user/data/User] | [{schema=1.0, byId=dfb5e30d-c47e-4f10-94ea-e8faba89c345_be, fields=occupied,quota,quotaType,occupiedNumRecordings,occupiedBytes,disabled}] | Data access unrecoverable error | Unrecoverable error details: Connection refused (Connection refused)"
        }, {
            "@timestamp": datetime(2017, 11, 27, 10, 35, 1, 718000).replace(tzinfo=timezones["Europe/Amsterdam"]),
            "level": "ERROR",
            "event_id_1": "d493b5da-13a4-4504-81af-484bc09329b7",
            "event_id_2": "b381adc9-9d87-4dbf-8c26-b58bc92c24e9",
            "domain": "INTERNAL",
            "ip": "10.95.97.65",
            "method": "[[GET] /user/data/User]",
            "params": "[{schema=1.0, byId=dfb5e30d-c47e-4f10-94ea-e8faba89c345_be, fields=occupied,quota,quotaType,occupiedNumRecordings,occupiedBytes,disabled}]",
            "description": "Data access unrecoverable error",
            "message": "Unrecoverable error details: Connection refused (Connection refused)"
        })

    def test_lgienh_api_audit(self):
        """LGI-ENH API line: shorter format with endpoint and request."""
        self.assert_parsing({
            "source": "/opt/vrm/jetty-dvr-console-DS/logs/lgienhapi_bs.log",
            "message": "2018-01-17 12:50:54.336 | INFO | RecordingsEndpoint | getRecordings: user: 17f5a9c1-db47-4f1d-939a-842a72f57b59_be"
        }, {
            "@timestamp": datetime(2018, 1, 17, 12, 50, 54, 336000).replace(tzinfo=timezones["Europe/Amsterdam"]),
            "level": "INFO",
            "endpoint": "RecordingsEndpoint",
            "request": "getRecordings: user: 17f5a9c1-db47-4f1d-939a-842a72f57b59_be"
        })

    def test_schange_import_bs_audit(self):
        """SeaChange import BS audit line."""
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-dvr-console-BS/logs/schange_import_bs_audit.log",
                "message": "17-Jan-2018 13:10:01.427 | INFO | 1563377d-8331-4ac1-93c3-089267cb815f | INTERNAL | local | [Job execute] | [{}] | Request to service returns a operation status response | Error response: title = Data Access Error, description = Data access unrecoverable error, result code = 5003"
            },
            {
                "@timestamp": datetime(2018, 1, 17, 13, 10, 1, 427000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "event_id": "1563377d-8331-4ac1-93c3-089267cb815f",
                "domain": "INTERNAL",
                "ip": "local",
                "method": "[Job execute]",
                "params": "[{}]",
                "description": "Request to service returns a operation status response",
                "message": "Error response: title = Data Access Error, description = Data access unrecoverable error, result code = 5003"
            }
        )

    def test_vspp_adapter_bs_audit(self):
        """VSPP adapter BS audit line (API domain).

        Renamed with the "test_" prefix so unittest discovery actually runs
        it; the original name is kept below as an alias.
        """
        self.assert_parsing(
            {
                "source": "/opt/vrm/jetty-dvr-console-BS/logs/schange_import_bs_audit.log",
                "message": "17-Jan-2018 13:01:42.327 | INFO | c1436d45-7d9b-4166-a84f-c2985a5e34a7 | API | 10.16.174.129 | [[POST] /vspp-adapter/Notification/Notify] | [{}] | Request execution finished | Request finished successfully"
            },
            {
                "@timestamp": datetime(2018, 1, 17, 13, 1, 42, 327000).replace(tzinfo=timezones["Europe/Amsterdam"]),
                "level": "INFO",
                "event_id": "c1436d45-7d9b-4166-a84f-c2985a5e34a7",
                "domain": "API",
                "ip": "10.16.174.129",
                "method": "[[POST] /vspp-adapter/Notification/Notify]",
                "params": "[{}]",
                "description": "Request execution finished",
                "message": "Request finished successfully"
            }
        )

    # Backward-compatible alias for the previously un-prefixed method name.
    vspp_adapter_bs_audit = test_vspp_adapter_bs_audit
def get_synchronization_statefile(self):
    """Return the path of this account's synchronization state file.

    The file lives in the account root directory; its name comes from the
    drive_client configuration, falling back to 'state.yaml'.
    """
    root_directory = self.get_account_root_directory_path()
    statefile_name = Configuration().get_setting(
        'drive_client', 'synchronization.state_filename',
        default='state.yaml')
    return os.path.join(root_directory, statefile_name)
#!/usr/bin/python2 from modules.rules.rulesmaker import RuleMaker from util.configuration import Configuration import logging if __name__ == '__main__': # Load the configuration file config = Configuration('config-cedar.ini') # Configure the logger root_logger = logging.getLogger('') root_logger.setLevel(logging.DEBUG if config.verbose() else logging.INFO) logFormat = '%(asctime)s %(name)-18s %(levelname)-8s %(message)s' ch = logging.StreamHandler() ch.setFormatter(logging.Formatter(logFormat)) root_logger.addHandler(ch) fh = logging.FileHandler('rules.log') fh.setFormatter(logging.Formatter(logFormat)) root_logger.addHandler(fh) #dataset = config.getURI('cedar',"BRT_1889_02_T1-S0") #dataset = config.getURI('cedar',"VT_1869_01_H1-S0") #dataset = config.getURI('cedar','VT_1879_01_H1-S0') #dataset = config.getURI('cedar','VT_1859_01_H1-S6') dataset = 'VT_1899_07_H1-S0' # Test rulesMaker = RuleMaker(config.get_SPARQL(), dataset, "/tmp/test.ttl") rulesMaker.loadMappings( config.get_path('mappings')) #, ['Sex','MaritalStatus']