def process(self): self.logger.info("Downloading report from %s" % self.parameters.ftps_host + ':' + str(self.parameters.ftps_port)) ftps = FTPS() ftps.connect(host=self.parameters.ftps_host, port=self.parameters.ftps_port) if hasattr(self.parameters, 'ftps_username') \ and hasattr(self.parameters, 'ftps_password'): ftps.login(user=self.parameters.ftps_username, passwd=self.parameters.ftps_password) ftps.prot_p() cwd = '/' if hasattr(self.parameters, 'ftps_directory'): self.logger.info('Changing working directory to: ' + self.parameters.ftps_directory) cwd = self.parameters.ftps_directory ftps.cwd(cwd) filemask = '*' if hasattr(self.parameters, 'ftps_file'): self.logger.info('Setting filemask to to: ' + self.parameters.ftps_file) filemask = self.parameters.ftps_file mem = io.BytesIO() files = fnmatch.filter(ftps.nlst(), filemask) self.logger.info('Found following files in the directory: ' + repr(files)) self.logger.info('Looking for latest file matching following pattern: ' + filemask) if files: self.logger.info('Retrieving file: ' + files[-1]) ftps.retrbinary("RETR " + files[-1], mem.write) else: self.logger.error("No file found, terminating download") return self.logger.info("Report downloaded.") raw_reports = [] try: zfp = zipfile.ZipFile(mem, "r") except zipfile.BadZipfile: raw_reports.append(mem.getvalue()) else: self.logger.info('Downloaded zip file, extracting following files: ' + ', '.join(zfp.namelist())) for filename in zfp.namelist(): raw_reports.append(zfp.read(filename)) for raw_report in raw_reports: report = Report() report.add("raw", raw_report, sanitize=True) report.add("feed.name", self.parameters.feed, sanitize=True) report.add("feed.url", 'ftps://' + self.parameters.ftps_host + ':' + str(self.parameters.ftps_port), sanitize=True) report.add("feed.accuracy", self.parameters.accuracy, sanitize=True) self.send_message(report)
def process(self): self.logger.info("Downloading report from %s" % self.parameters.http_url) resp = requests.get(url=self.parameters.http_url, auth=self.auth, proxies=self.proxy, headers=self.http_header, verify=self.http_verify_cert) if resp.status_code // 100 != 2: raise ValueError('HTTP response status code was {}.' ''.format(resp.status_code)) self.logger.info("Report downloaded.") raw_reports = [] try: zfp = zipfile.ZipFile(io.BytesIO(resp.content), "r") except zipfile.BadZipfile: raw_reports.append(resp.text) else: self.logger.info('Downloaded zip file, extracting following files:' ' ' + ', '.join(zfp.namelist())) for filename in zfp.namelist(): raw_reports.append(zfp.read(filename)) for raw_report in raw_reports: report = Report() report.add("raw", raw_report) report.add("feed.name", self.parameters.feed) report.add("feed.url", self.parameters.http_url) report.add("feed.accuracy", self.parameters.accuracy) self.send_message(report)
def test_generate_reports_with_chunking_and_copying_header(self):
    """Test generate_reports with chunking and copying the header."""
    chunk_size = 1000

    # This test only makes sense if the test data actually is longer
    # than the chunk size.
    self.assertTrue(chunk_size < len(csv_test_data))

    template = Report(harmonization=HARM)
    template.add("feed.name",
                 "test_generate_reports_with_chunking_and_header")
    observation_time = template["time.observation"]

    original_header = io.BytesIO(csv_test_data).readline()
    decoded_chunks = [original_header]
    for report in generate_reports(template, io.BytesIO(csv_test_data),
                                   chunk_size=chunk_size,
                                   copy_header_line=True):
        self.assertEqual(report["feed.name"],
                         "test_generate_reports_with_chunking_and_header")
        self.assertEqual(report["time.observation"], observation_time)
        report_data = io.BytesIO(base64.b64decode(report["raw"]))
        header = report_data.readline()
        chunk = report_data.read()
        self.assertEqual(original_header, header)
        decoded_chunks.append(chunk)
    self.assertEqual(b"".join(decoded_chunks), csv_test_data)
def process(self):
    # Grab the events from MISP.
    misp_result = self.misp.search(tags=self.parameters.misp_tag_to_process)

    # Process the response and events.
    if 'response' in misp_result:
        # Extract the MISP event details.
        for e in misp_result['response']:
            misp_event = e['Event']

            # Send the results to the parser.
            report = Report()
            report.add('raw', json.dumps(misp_event, sort_keys=True))
            report.add('feed.name', self.parameters.feed)
            report.add('feed.url', self.parameters.misp_url)
            report.add('feed.accuracy', self.parameters.accuracy)
            self.send_message(report)

        # Finally, update the tags on the MISP events.
        for misp_event in misp_result['response']:
            # Remove the 'to be processed' tag.
            self.misp.remove_tag(misp_event,
                                 self.parameters.misp_tag_to_process)
            # Add a 'processed' tag to the event.
            self.misp.add_tag(misp_event, self.parameters.misp_tag_processed)
def on_receive(self, data):
    for line in data.decode().splitlines():
        line = line.strip()
        if line == "":
            continue
        report = Report()
        report.add("raw", line)
        self.send_message(report)
def process(self): self.logger.info("Downloading report through API") https_proxy = getattr(self.parameters, 'http_ssl_proxy', None) otx = OTXv2(self.parameters.api_key, proxy=https_proxy) pulses = otx.getall() self.logger.info("Report downloaded.") report = Report() report.add("raw", json.dumps(pulses)) self.send_message(report)
def test_generate_reports_no_chunking(self):
    """Test generate_reports with chunking disabled."""
    template = Report(harmonization=HARM)
    template.add("feed.name", "test_generate_reports_no_chunking")

    [report] = list(generate_reports(template, io.BytesIO(csv_test_data),
                                     chunk_size=None,
                                     copy_header_line=False))
    self.assertEqual(report["feed.name"],
                     "test_generate_reports_no_chunking")
    self.assertEqual(base64.b64decode(report["raw"]), csv_test_data)
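# generate_reports itself is not shown in this section. A minimal sketch
# consistent with what the surrounding tests assert, assuming the Report
# class supports copy(); note that a production splitter would avoid
# cutting lines in half, while this sketch splits at plain byte
# boundaries for brevity:

def generate_reports(template, infile, chunk_size, copy_header_line):
    """Yield copies of template whose "raw" fields together cover infile."""
    if chunk_size is None:
        # Chunking disabled: the whole file becomes a single report.
        report = template.copy()
        report.add("raw", infile.read())
        yield report
        return
    # Optionally remember the first (header) line and prepend it to
    # every chunk.
    header = infile.readline() if copy_header_line else b""
    for chunk in iter(lambda: infile.read(chunk_size), b""):
        report = template.copy()
        report.add("raw", header + chunk)
        yield report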
def on_receive(self, data):
    for line in data.decode().splitlines():
        line = line.strip()
        if line == "":
            continue
        report = Report()
        report.add("raw", line)
        report.add("feed.url", self.parameters.http_url)
        self.send_message(report)
def process(self):
    RT = rt.Rt(self.parameters.uri, self.parameters.user,
               self.parameters.password)
    if not RT.login():
        raise ValueError('Login failed.')

    query = RT.search(Queue=self.parameters.search_queue,
                      Subject__like=self.parameters.search_subject_like,
                      Owner=self.parameters.search_owner,
                      Status=self.parameters.search_status)
    self.logger.info('{} results on search query.'.format(len(query)))

    for ticket in query:
        ticket_id = int(ticket['id'].split('/')[1])
        self.logger.debug('Process ticket {}.'.format(ticket_id))
        for (att_id, att_name, _, _) in RT.get_attachments(ticket_id):
            if re.search(self.parameters.attachment_regex, att_name):
                self.logger.debug('Found attachment {}: {!r}.'
                                  ''.format(att_id, att_name))
                break
        else:
            self.logger.debug('No matching attachment name found.')
            continue

        attachment = RT.get_attachment_content(ticket_id, att_id)
        if self.parameters.unzip_attachment:
            file_obj = io.BytesIO(attachment)
            zipped = zipfile.ZipFile(file_obj)
            raw = zipped.read(zipped.namelist()[0])
        else:
            raw = attachment

        if self.parameters.gnupg_decrypt:
            raw = str(self.gpg.decrypt(
                raw,
                always_trust=self.parameters.gnupg_trust,
                passphrase=self.parameters.gnupg_passphrase))
            self.logger.info('Successfully decrypted attachment.')

        self.logger.debug(raw)

        report = Report()
        report.add("raw", raw, sanitize=True)
        report.add("rtir_id", ticket_id, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        time_observation = DateTime().generate_datetime_now()
        report.add('time.observation', time_observation, sanitize=True)
        self.send_message(report)

        if self.parameters.take_ticket:
            RT.edit_ticket(ticket_id, Owner=self.parameters.user)
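# The self.gpg object used for gnupg_decrypt above is created during
# initialization, which is not shown in this section. A sketch, assuming
# the python-gnupg package and a hypothetical gnupg_homedir parameter:

import gnupg


def init(self):
    self.gpg = gnupg.GPG(gnupghome=self.parameters.gnupg_homedir)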
def process(self): self.logger.info("Downloading report through API") otx = OTXv2(self.parameters.api_key) pulses = otx.getall() self.logger.info("Report downloaded.") report = Report() report.add("raw", json.dumps(pulses)) report.add("feed.name", self.parameters.feed) report.add("feed.accuracy", self.parameters.accuracy) self.send_message(report)
def on_message(self, headers, message):
    self.n6stomper.logger.debug('Received message '
                                '{!r}...'.format(message[:500]))
    report = Report()
    report.add("raw", message.rstrip())
    report.add("feed.url", "stomp://" +
               self.n6stomper.parameters.server + ":" +
               str(self.n6stomper.parameters.port) + "/" +
               self.n6stomper.parameters.exchange)
    self.n6stomper.send_message(report)
    self.n6stomper.logger.debug('Message sent.')
def on_receive(self, data):
    for line in data.decode().splitlines():
        line = line.strip()
        if line == "":
            continue
        report = Report()
        report.add("raw", line)
        report.add("feed.name", self.parameters.feed)
        report.add("feed.accuracy", self.parameters.accuracy)
        self.send_message(report)
def process(self):
    # Grab the events from MISP.
    misp_result = self.misp.search(tags=self.parameters.misp_tag_to_process)

    # Process the response and events.
    if 'response' in misp_result:
        # Extract the MISP event details.
        misp_events = list()
        for result in misp_result['response']:
            misp_events.append(result['Event'])

        # Send the results to the parser.
        report = Report()
        report.add('raw', json.dumps(misp_events, sort_keys=True))
        report.add('feed.name', self.parameters.feed)
        report.add('feed.url', self.parameters.misp_url)
        report.add('feed.accuracy', self.parameters.accuracy)
        self.send_message(report)

        # Finally, update the tags on the MISP events.
        # Note: PyMISP does not currently support this, so we use
        # the API URLs directly with the requests module.
        session = requests.Session()
        session.headers.update({
            'Authorization': self.misp.key,
            'Accept': 'application/json',
            'Content-Type': 'application/json',
        })

        post_data = {
            'request': {
                'Event': {
                    'tag': None,
                    'id': None,
                }
            }
        }

        for misp_event in misp_events:
            post_data['request']['Event']['id'] = misp_event['id']

            # Remove the 'to be processed' tag.
            tag = self.parameters.misp_tag_to_process
            post_data['request']['Event']['tag'] = tag
            session.post(self.misp_del_tag_url, data=json.dumps(post_data))

            # Add a 'processed' tag to the event.
            tag = self.parameters.misp_tag_processed
            post_data['request']['Event']['tag'] = tag
            session.post(self.misp_add_tag_url, data=json.dumps(post_data))
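# self.misp_add_tag_url and self.misp_del_tag_url are built during
# initialization, which is not shown here. A sketch, assuming the classic
# MISP REST routes for event tagging (the exact paths depend on the MISP
# version in use):

from urllib.parse import urljoin


def init(self):
    # misp_url should end with a trailing slash for urljoin to behave
    # as intended.
    self.misp_add_tag_url = urljoin(self.parameters.misp_url,
                                    'events/addTag')
    self.misp_del_tag_url = urljoin(self.parameters.misp_url,
                                    'events/removeTag')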
def process(self): self.logger.info("Downloading report through API") otx = OTXv2(self.parameters.api_key) pulses = otx.getall() self.logger.info("Report downloaded.") report = Report() report.add("raw", json.dumps(pulses), sanitize=True) report.add("feed.name", self.parameters.feed, sanitize=True) report.add("feed.accuracy", self.parameters.accuracy, sanitize=True) time_observation = DateTime().generate_datetime_now() report.add('time.observation', time_observation, sanitize=True) self.send_message(report)
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.mail_folder,
                              unread=True)

    if emails:
        for uid, message in emails:
            if (self.parameters.mail_subject_regex and
                    not re.search(self.parameters.mail_subject_regex,
                                  message.subject)):
                continue
            self.logger.info("Reading email report")

            for attach in message.attachments:
                if not attach:
                    continue
                # Remove the quote marks from the filename.
                attach_name = attach['filename'][1:-1]
                if re.search(self.parameters.mail_attach_regex, attach_name):
                    if self.parameters.mail_attach_unzip:
                        zipped = zipfile.ZipFile(attach['content'])
                        raw_report = zipped.read(zipped.namelist()[0])
                    else:
                        raw_report = attach['content'].read()

                    report = Report()
                    report.add("raw", raw_report, sanitize=True)
                    report.add("feed.name", self.parameters.feed,
                               sanitize=True)
                    report.add("feed.accuracy", self.parameters.accuracy,
                               sanitize=True)
                    time_observation = DateTime().generate_datetime_now()
                    report.add('time.observation', time_observation,
                               sanitize=True)
                    self.send_message(report)

            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def on_message(self, headers, message):
    self.n6stomper.logger.debug('Received message '
                                '{!r}...'.format(message[:500]))
    report = Report()
    report.add("raw", message.rstrip(), sanitize=True)
    report.add("feed.name", self.n6stomper.parameters.feed, sanitize=True)
    report.add("feed.url", "stomp://" +
               self.n6stomper.parameters.server + ":" +
               str(self.n6stomper.parameters.port) + "/" +
               self.n6stomper.parameters.exchange, sanitize=True)
    time_observation = DateTime().generate_datetime_now()
    report.add('time.observation', time_observation, sanitize=True)
    self.n6stomper.send_message(report)
    self.n6stomper.logger.debug('Message sent.')
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.folder, unread=True)

    if emails:
        for uid, message in emails:
            if (self.parameters.subject_regex and
                    not re.search(self.parameters.subject_regex,
                                  message.subject)):
                continue
            self.logger.info("Reading email report")

            for attach in message.attachments:
                if not attach:
                    continue
                # Remove the quote marks from the filename.
                attach_name = attach['filename'][1:-1]
                if re.search(self.parameters.attach_regex, attach_name):
                    if self.parameters.attach_unzip:
                        zipped = zipfile.ZipFile(attach['content'])
                        raw_report = zipped.read(zipped.namelist()[0])
                    else:
                        raw_report = attach['content'].read()

                    report = Report()
                    report.add("raw", raw_report)
                    report.add("feed.name", self.parameters.feed)
                    report.add("feed.accuracy", self.parameters.accuracy)
                    self.send_message(report)

            # Only mark read if the message is relevant to this instance,
            # so other instances watching this mailbox will still check it.
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")

    mailbox.logout()
def process(self):
    try:
        req = requests.get(self.parameters.url, stream=True)
    except requests.exceptions.ConnectionError:
        raise ValueError('Connection Failed.')
    else:
        for line in req.iter_lines():
            if self.parameters.strip_lines:
                line = line.strip()
            if not line:
                # Filter out keep-alive new lines and empty lines.
                continue
            report = Report()
            report.add("raw", decode(line))
            self.send_message(report)
        self.logger.info('Stream stopped.')
def process(self): self.logger.info("Downloading report through API") http_proxy = getattr(self.parameters, 'http_proxy', None) https_proxy = getattr(self.parameters, 'http_ssl_proxy', None) proxy = None if http_proxy and https_proxy: proxy = {'http': http_proxy, 'https': https_proxy} api = BluelivAPI(base_url='https://freeapi.blueliv.com', token=self.parameters.api_key, log_level=logging.INFO, proxy=proxy) response = api.crime_servers.online() self.logger.info("Report downloaded.") report = Report() report.add("raw", json.dumps([item for item in response.items])) self.send_message(report)
def test_generate_reports_with_chunking_no_header(self):
    """Test generate_reports with chunking and not copying the header."""
    template = Report(harmonization=HARM)
    template.add("feed.name", "test_generate_reports_with_chunking")

    chunk_size = 1000

    # This test only makes sense if the test data actually is longer
    # than the chunk size.
    self.assertTrue(chunk_size < len(csv_test_data))

    decoded_chunks = []
    for report in generate_reports(template, io.BytesIO(csv_test_data),
                                   chunk_size=chunk_size,
                                   copy_header_line=False):
        self.assertEqual(report["feed.name"],
                         "test_generate_reports_with_chunking")
        decoded_chunks.append(base64.b64decode(report["raw"]))
    self.assertEqual(b"".join(decoded_chunks), csv_test_data)
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.mail_folder,
                              unread=True)

    if emails:
        for uid, message in emails:
            if (self.parameters.mail_subject_regex and
                    not re.search(self.parameters.mail_subject_regex,
                                  message.subject)):
                continue
            self.logger.info("Reading email report")

            for body in message.body['plain']:
                match = re.search(self.parameters.mail_url_regex, body)
                if match:
                    url = match.group()
                    self.logger.info("Downloading report from %s" % url)
                    resp = requests.get(url=url)

                    if resp.status_code // 100 != 2:
                        raise ValueError('HTTP response status code was {}.'
                                         ''.format(resp.status_code))

                    self.logger.info("Report downloaded.")

                    report = Report()
                    report.add("raw", resp.content)
                    report.add("feed.name", self.parameters.feed)
                    report.add("feed.accuracy", self.parameters.accuracy)
                    self.send_message(report)

            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self): self.logger.debug("Started looking for files.") if os.path.isdir(self.parameters.path): p = os.path.abspath(self.parameters.path) # iterate over all files in dir for f in os.listdir(p): filename = os.path.join(p, f) if os.path.isfile(filename): if fnmatch.fnmatch(f, '*' + self.parameters.postfix): self.logger.info("Processing file %r." % filename) with open(filename, 'r') as f: report = Report() report.add("raw", f.read()) report.add("feed.name", self.parameters.feed) report.add("feed.url", "file://localhost%s" % filename) report.add("feed.accuracy", self.parameters.accuracy) self.send_message(report) if self.parameters.delete_file: try: os.remove(filename) self.logger.debug("Deleted file: %r." % filename) except PermissionError: self.logger.error("Could not delete file %r." % filename) self.logger.info( "Maybe I don't have sufficient rights on that file?" ) self.logger.error( "Stopping now, to prevent reading this file again." ) self.stop()
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.mail_folder,
                              unread=True)

    if emails:
        for uid, message in emails:
            if (self.parameters.mail_subject_regex and
                    not re.search(self.parameters.mail_subject_regex,
                                  message.subject)):
                continue
            self.logger.info("Reading email report")

            for body in message.body['plain']:
                match = re.search(self.parameters.mail_url_regex, body)
                if match:
                    url = match.group()
                    self.logger.info("Downloading report from %s" % url)
                    raw_report = fetch_url(url, timeout=60.0,
                                           chunk_size=16384)
                    self.logger.info("Report downloaded.")

                    report = Report()
                    report.add("raw", raw_report, sanitize=True)
                    report.add("feed.name", self.parameters.feed,
                               sanitize=True)
                    report.add("feed.accuracy", self.parameters.accuracy,
                               sanitize=True)
                    time_observation = DateTime().generate_datetime_now()
                    report.add('time.observation', time_observation,
                               sanitize=True)
                    self.send_message(report)

            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
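# fetch_url is a helper that is not defined in this section. A minimal
# sketch of a compatible implementation, reading the response in
# chunk_size pieces so large reports are not pulled in with a single
# read() call:

import urllib.request


def fetch_url(url, timeout=60.0, chunk_size=16384):
    """Fetch a URL and return the response body as bytes."""
    parts = []
    with urllib.request.urlopen(url, timeout=timeout) as resp:
        while True:
            chunk = resp.read(chunk_size)
            if not chunk:
                break
            parts.append(chunk)
    return b"".join(parts)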
def process(self): self.logger.info("Downloading report from %s" % self.parameters.http_url) resp = requests.get(url=self.parameters.http_url, auth=self.auth, proxies=self.proxy, headers=self.http_header, verify=self.http_verify_cert) if resp.status_code // 100 != 2: raise ValueError('HTTP response status code was {}.' ''.format(resp.status_code)) self.logger.info("Report downloaded.") report = Report() report.add("raw", resp.text, sanitize=True) report.add("feed.name", self.parameters.feed, sanitize=True) report.add("feed.url", self.parameters.http_url, sanitize=True) report.add("feed.accuracy", self.parameters.accuracy, sanitize=True) time_observation = DateTime().generate_datetime_now() report.add('time.observation', time_observation, sanitize=True) self.send_message(report)
def log_message(self, msg):
    if self.parameters.pass_full_xml:
        body = str(msg)
    else:
        if self.parameters.strip_message:
            body = msg['body'].strip()
        else:
            body = msg['body']

    if len(body) > 400:
        tmp_body = body[:397] + '...'
    else:
        tmp_body = body
    self.logger.debug("Received Stanza: %r from %r", tmp_body, msg['from'])

    raw_msg = body
    # Read the message body and add it as "raw" to a new report;
    # it is up to a parser to interpret the message.
    if raw_msg:
        report = Report()
        report.add("raw", raw_msg)
        self.send_message(report)
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          utils.base64_decode(self.parameters.mail_password),
                          self.parameters.mail_ssl)
    self.logger.info("Connected to mail server")
    emails = mailbox.messages(folder=self.parameters.folder, unread=True)

    try:
        if emails:
            self.logger.info("Parsing emails in mailbox")
            for uid, message in emails:
                if (self.parameters.subject_regex and
                        not re.search(self.parameters.subject_regex,
                                      message.subject)):
                    continue
                self.logger.info("Reading email report")

                if hasattr(message, 'attachments') and message.attachments:
                    for attach in message.attachments:
                        if not attach:
                            continue
                        # Remove the quote marks from the filename.
                        attach_name = attach['filename'][1:-1]
                        if re.search(self.parameters.attach_regex,
                                     attach_name):
                            self.logger.info("Parsing attachment")
                            if self.parameters.attach_unzip:
                                zipped = zipfile.ZipFile(attach['content'])
                                raw_report = zipped.read(zipped.namelist()[0])
                            else:
                                raw_report = attach['content'].read()
                            self.logger.info("Attachment content read")

                            report = Report()
                            report.add("raw", raw_report, sanitize=True)
                            report.add("feed.name", self.parameters.feed,
                                       sanitize=True)
                            report.add("feed.accuracy",
                                       self.parameters.accuracy,
                                       sanitize=True)
                            # time.observation is intentionally not set here:
                            # time_observation = DateTime().generate_datetime_now()
                            # report.add('time.observation', time_observation)
                            report.add('feed.reportname', message.subject,
                                       sanitize=True)
                            self.send_message(report)

                            mailbox.mark_seen(uid)
                            self.logger.info("Email report read")
                else:
                    # If there is no attachment, read the report from a URL
                    # found in the message body.
                    # TODO: update the way of fetching from a URL to the new
                    # approach used in the http/ collector.
                    self.logger.info("No attachment found, trying to "
                                     "collect from URL")
                    for body in message.body['plain']:
                        self.logger.info("Parsing message body")
                        match = re.search(self.parameters.url_regex, body)
                        if match:
                            url = match.group()
                            self.logger.info("Downloading report from %s"
                                             % url)
                            resp = requests.get(url=url)
                            if resp.status_code // 100 != 2:
                                raise ValueError('HTTP response status code '
                                                 'was {}.'
                                                 ''.format(resp.status_code))
                            raw_report = resp.content
                            self.logger.info("Report downloaded.")

                            report = Report()
                            report.add("raw", raw_report, sanitize=True)
                            report.add("feed.name", self.parameters.feed,
                                       sanitize=True)
                            report.add("feed.accuracy",
                                       self.parameters.accuracy,
                                       sanitize=True)
                            # time.observation is intentionally not set here:
                            # time_observation = DateTime().generate_datetime_now()
                            # report.add('time.observation', time_observation, sanitize=True)
                            report.add('feed.reportname', message.subject,
                                       sanitize=True)
                            self.send_message(report)

                            mailbox.mark_seen(uid)
                            self.logger.info("Email report read")
    except Exception:
        self.logger.exception("Error while processing the mailbox.")
def muc_message(self, msg):
    report = Report()
    report.add("raw", msg['body'])
    report.add("feed.name", self.bot.parameters.feed)
    report.add("feed.accuracy", self.bot.parameters.accuracy)
    self.bot.send_message(report)
def process(self):
    report = Report()
    if self.parameters.raw:
        report['raw'] = 'test'
    self.send_message(report)
def process(self):
    RT = rt.Rt(self.parameters.uri, self.parameters.user,
               self.parameters.password)
    if not RT.login():
        raise ValueError('Login failed.')

    query = RT.search(Queue=self.parameters.search_queue,
                      Subject__like=self.parameters.search_subject_like,
                      Owner=self.parameters.search_owner,
                      Status=self.parameters.search_status)
    self.logger.info('{} results on search query.'.format(len(query)))

    for ticket in query:
        ticket_id = int(ticket['id'].split('/')[1])
        self.logger.debug('Process ticket {}.'.format(ticket_id))
        content = 'attachment'
        for (att_id, att_name, _, _) in RT.get_attachments(ticket_id):
            if re.search(self.parameters.attachment_regex, att_name):
                self.logger.debug('Found attachment {}: {!r}.'
                                  ''.format(att_id, att_name))
                break
        else:
            text = RT.get_history(ticket_id)[0]['Content']
            urlmatch = re.search(self.parameters.url_regex, text)
            if urlmatch:
                content = 'url'
                url = urlmatch.group(0)
            else:
                self.logger.debug('No matching attachment or URL found.')
                continue

        if content == 'attachment':
            attachment = RT.get_attachment_content(ticket_id, att_id)
            if self.parameters.unzip_attachment:
                file_obj = io.BytesIO(attachment)
                zipped = zipfile.ZipFile(file_obj)
                raw = zipped.read(zipped.namelist()[0])
            else:
                raw = attachment
        else:
            resp = requests.get(url=url, proxies=self.proxy,
                                headers=self.http_header,
                                verify=self.http_verify_cert)
            if resp.status_code // 100 != 2:
                raise ValueError('HTTP response status code was {}.'
                                 ''.format(resp.status_code))
            self.logger.info("Report downloaded.")
            raw = resp.text

        report = Report()
        report.add("raw", raw, sanitize=True)
        report.add("rtir_id", ticket_id, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        self.send_message(report)

        if self.parameters.take_ticket:
            try:
                RT.take(ticket_id)
            except rt.BadRequest:
                self.logger.exception("Could not take ticket %s."
                                      % ticket_id)

        if self.parameters.set_status:
            RT.edit_ticket(ticket_id, status=self.parameters.set_status)
def process(self):
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.folder, unread=True)

    if emails:
        for uid, message in emails:
            if (self.parameters.subject_regex and
                    not re.search(self.parameters.subject_regex,
                                  message.subject)):
                continue
            self.logger.info("Reading email report")

            for body in message.body['plain']:
                match = re.search(self.parameters.url_regex, str(body))
                if match:
                    # Strip leading and trailing spaces, newlines and
                    # carriage returns.
                    url = match.group().strip()

                    # Build the request.
                    self.http_header = getattr(self.parameters,
                                               'http_header', {})
                    self.http_verify_cert = getattr(self.parameters,
                                                    'http_verify_cert', True)
                    if (hasattr(self.parameters, 'http_user') and
                            hasattr(self.parameters, 'http_password')):
                        self.auth = (self.parameters.http_user,
                                     self.parameters.http_password)
                    else:
                        self.auth = None

                    http_proxy = getattr(self.parameters, 'http_proxy', None)
                    https_proxy = getattr(self.parameters,
                                          'http_ssl_proxy', None)
                    if http_proxy and https_proxy:
                        self.proxy = {'http': http_proxy,
                                      'https': https_proxy}
                    else:
                        self.proxy = None

                    self.http_header['User-agent'] = \
                        self.parameters.http_user_agent

                    self.logger.info("Downloading report from %s" % url)
                    resp = requests.get(url=url, auth=self.auth,
                                        proxies=self.proxy,
                                        headers=self.http_header,
                                        verify=self.http_verify_cert)

                    if resp.status_code // 100 != 2:
                        raise ValueError('HTTP response status code was {}.'
                                         ''.format(resp.status_code))

                    self.logger.info("Report downloaded.")

                    report = Report()
                    report.add("raw", resp.content)
                    report.add("feed.name", self.parameters.feed)
                    report.add("feed.accuracy", self.parameters.accuracy)
                    self.send_message(report)

                    # Only mark read if the message is relevant to this
                    # instance, so other instances watching this mailbox
                    # will still check it.
                    mailbox.mark_seen(uid)
                    self.logger.info("Email report read")