def process(self):
    """Fetch MISP events tagged for processing, emit each as a report, then retag."""
    # Ask MISP for every event carrying the 'to be processed' tag.
    search_result = self.misp.search(
        tags=self.parameters.misp_tag_to_process)

    if 'response' not in search_result:
        return

    # Forward each event to the parser as its own JSON report.
    for entry in search_result['response']:
        event = entry['Event']
        report = Report()
        report.add('raw', json.dumps(event, sort_keys=True))
        report.add('feed.name', self.parameters.feed)
        report.add('feed.url', self.parameters.misp_url)
        report.add('feed.accuracy', self.parameters.accuracy)
        self.send_message(report)

    # Swap the 'to be processed' tag for the 'processed' one on each
    # event so it is not picked up again on the next run.
    for entry in search_result['response']:
        self.misp.remove_tag(entry, self.parameters.misp_tag_to_process)
        self.misp.add_tag(entry, self.parameters.misp_tag_processed)
def process(self):
    """Scan the configured directory and emit one report per matching file."""
    self.logger.debug("Started looking for files.")
    if not os.path.isdir(self.parameters.path):
        return
    directory = os.path.abspath(self.parameters.path)
    pattern = '*' + self.parameters.postfix
    # Examine every entry in the directory; only plain files with the
    # configured postfix are processed.
    for entry in os.listdir(directory):
        full_path = os.path.join(directory, entry)
        if not os.path.isfile(full_path):
            continue
        if not fnmatch.fnmatch(entry, pattern):
            continue
        self.logger.info("Processing file %r." % full_path)
        with open(full_path, 'r') as handle:
            report = Report()
            report.add("raw", handle.read())
            report.add("feed.url", "file://localhost%s" % full_path)
            self.send_message(report)
        if self.parameters.delete_file:
            try:
                os.remove(full_path)
                self.logger.debug("Deleted file: %r." % full_path)
            except PermissionError:
                # Refuse to continue: leaving the file in place would
                # make us re-read (and re-send) it on the next run.
                self.logger.error("Could not delete file %r." % full_path)
                self.logger.info("Maybe I don't have sufficient rights on that file?")
                self.logger.error("Stopping now, to prevent reading this file again.")
                self.stop()
def process(self):
    """Download the newest matching file from an FTPS server and emit reports.

    Connects (optionally authenticating), switches to the configured
    directory, picks the last file matching the filemask and forwards its
    contents -- extracting zip archives when necessary -- as reports.

    Fixes: the filemask log message contained a "to to" typo, and the
    first log line relied on `%` binding tighter than `+` to splice host
    and port together; it now formats both explicitly.
    """
    self.logger.info("Downloading report from %s:%s"
                     % (self.parameters.ftps_host,
                        self.parameters.ftps_port))
    ftps = FTPS()
    ftps.connect(host=self.parameters.ftps_host,
                 port=self.parameters.ftps_port)
    # Authenticate only when credentials are configured.
    if hasattr(self.parameters, 'ftps_username') \
            and hasattr(self.parameters, 'ftps_password'):
        ftps.login(user=self.parameters.ftps_username,
                   passwd=self.parameters.ftps_password)
    # Switch the data channel to TLS.
    ftps.prot_p()
    cwd = '/'
    if hasattr(self.parameters, 'ftps_directory'):
        self.logger.info('Changing working directory to: ' +
                         self.parameters.ftps_directory)
        cwd = self.parameters.ftps_directory
    ftps.cwd(cwd)
    filemask = '*'
    if hasattr(self.parameters, 'ftps_file'):
        # Fixed typo in log message ("to to" -> "to").
        self.logger.info('Setting filemask to: ' + self.parameters.ftps_file)
        filemask = self.parameters.ftps_file
    mem = io.BytesIO()
    files = fnmatch.filter(ftps.nlst(), filemask)
    self.logger.info('Found following files in the directory: ' + repr(files))
    self.logger.info('Looking for latest file matching following pattern: ' +
                     filemask)
    if files:
        # Only the latest (last listed) matching file is retrieved.
        self.logger.info('Retrieving file: ' + files[-1])
        ftps.retrbinary("RETR " + files[-1], mem.write)
    else:
        self.logger.error("No file found, terminating download")
        return
    self.logger.info("Report downloaded.")
    raw_reports = []
    try:
        zfp = zipfile.ZipFile(mem, "r")
    except zipfile.BadZipfile:
        # Not a zip archive: forward the raw payload as-is.
        raw_reports.append(mem.getvalue())
    else:
        self.logger.info('Downloaded zip file, extracting following files: ' +
                         ', '.join(zfp.namelist()))
        for filename in zfp.namelist():
            raw_reports.append(zfp.read(filename))
    for raw_report in raw_reports:
        report = Report()
        report.add("raw", raw_report, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.url", 'ftps://' + self.parameters.ftps_host + ':' +
                   str(self.parameters.ftps_port), sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        self.send_message(report)
def process(self):
    """Fetch tagged MISP events, emit one report each, then update the tags."""
    # Query MISP for events carrying the 'to be processed' tag.
    result = self.misp.search(
        tags=self.parameters.misp_tag_to_process
    )
    if 'response' not in result:
        return
    # One report per event; the parser interprets the JSON payload.
    for entry in result['response']:
        event = entry['Event']
        report = Report()
        report.add('raw', json.dumps(event, sort_keys=True))
        report.add('feed.url', self.parameters.misp_url)
        self.send_message(report)
    # Retag each event so it is not collected again on the next run.
    for entry in result['response']:
        # Remove the 'to be processed' tag ...
        self.misp.remove_tag(entry, self.parameters.misp_tag_to_process)
        # ... then mark the event as processed.
        self.misp.add_tag(entry, self.parameters.misp_tag_processed)
def process(self):
    """Fetch a report over HTTP(S), unpack zip payloads, and emit reports."""
    self.logger.info("Downloading report from %s" % self.parameters.http_url)
    resp = requests.get(url=self.parameters.http_url, auth=self.auth,
                        proxies=self.proxy, headers=self.http_header,
                        verify=self.http_verify_cert)
    # Any status outside the 2xx range counts as a failed download.
    if resp.status_code // 100 != 2:
        raise ValueError('HTTP response status code was {}.'
                         ''.format(resp.status_code))
    self.logger.info("Report downloaded.")
    raw_reports = []
    try:
        archive = zipfile.ZipFile(io.BytesIO(resp.content), "r")
    except zipfile.BadZipfile:
        # Plain payload: forward the decoded response text.
        raw_reports.append(resp.text)
    else:
        self.logger.info('Downloaded zip file, extracting following files:'
                         ' ' + ', '.join(archive.namelist()))
        for member in archive.namelist():
            raw_reports.append(archive.read(member))
    for payload in raw_reports:
        report = Report()
        report.add("raw", payload)
        report.add("feed.name", self.parameters.feed)
        report.add("feed.url", self.parameters.http_url)
        report.add("feed.accuracy", self.parameters.accuracy)
        self.send_message(report)
def test_generate_reports_with_chunking_and_copying_header(self):
    """Test generate_reports with chunking and copying the header"""
    chunk_size = 1000
    # The test is only meaningful when the data exceeds the chunk size.
    self.assertTrue(chunk_size < len(csv_test_data))
    template = Report(harmonization=HARM)
    template.add("feed.name",
                 "test_generate_reports_with_chunking_and_header")
    observation_time = template["time.observation"]
    expected_header = io.BytesIO(csv_test_data).readline()
    collected = [expected_header]
    for report in generate_reports(template, io.BytesIO(csv_test_data),
                                   chunk_size=chunk_size,
                                   copy_header_line=True):
        # Template fields must be carried over into every chunk report.
        self.assertEqual(report["feed.name"],
                         "test_generate_reports_with_chunking_and_header")
        self.assertEqual(report["time.observation"], observation_time)
        payload = io.BytesIO(base64.b64decode(report["raw"]))
        # Each chunk must start with a copy of the original header line.
        self.assertEqual(expected_header, payload.readline())
        collected.append(payload.read())
    # Header + chunks must reassemble to the original data.
    self.assertEqual(b"".join(collected), csv_test_data)
def process(self):
    """Download a report over HTTP(S), unzip if needed, and forward it."""
    self.logger.info("Downloading report from %s" % self.parameters.http_url)
    resp = requests.get(url=self.parameters.http_url, auth=self.auth,
                        proxies=self.proxy, headers=self.http_header,
                        verify=self.http_verify_cert)
    # Treat anything outside the 2xx range as a failure.
    if resp.status_code // 100 != 2:
        raise ValueError('HTTP response status code was {}.'
                         ''.format(resp.status_code))
    self.logger.info("Report downloaded.")
    raw_reports = []
    try:
        archive = zipfile.ZipFile(io.BytesIO(resp.content), "r")
    except zipfile.BadZipfile:
        # Response is not a zip archive: use the text body directly.
        raw_reports.append(resp.text)
    else:
        self.logger.info('Downloaded zip file, extracting following files:'
                         ' ' + ', '.join(archive.namelist()))
        for member in archive.namelist():
            raw_reports.append(archive.read(member))
    for payload in raw_reports:
        report = Report()
        report.add("raw", payload)
        report.add("feed.url", self.parameters.http_url)
        self.send_message(report)
def process(self):
    """Poll an IMAP folder and fetch reports from URLs found in mail bodies."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.folder, unread=True)
    if not unread:
        return
    for uid, message in unread:
        # Skip messages whose subject does not match the configured regex.
        if self.parameters.subject_regex and not re.search(
                self.parameters.subject_regex, message.subject):
            continue
        self.logger.info("Reading email report")
        for body in message.body['plain']:
            match = re.search(self.parameters.url_regex, body)
            if not match:
                continue
            url = match.group()
            self.logger.info("Downloading report from %s" % url)
            raw_report = fetch_url(url, timeout=60.0, chunk_size=16384)
            self.logger.info("Report downloaded.")
            report = Report()
            report.add("raw", raw_report, sanitize=True)
            self.send_message(report)
            # Mark the mail as seen only after the report was sent.
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self):
    """Emit the plain-text body of matching unread mails as reports."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.mail_folder, unread=True)
    # Optionally match the subject case-insensitively.
    reflags = re.IGNORECASE if getattr(self.parameters,
                                       "mail_subject_ignorecase",
                                       False) else 0
    if not unread:
        return
    for uid, message in unread:
        if (self.parameters.mail_subject_regex and
                not re.search(self.parameters.mail_subject_regex,
                              message.subject, flags=reflags)):
            self.logger.info("Subject regex not matched: '%s' in '%s'",
                             self.parameters.mail_subject_regex,
                             message.subject)
            continue
        self.logger.info("Reading email report")
        report = Report()
        # Only the first plain-text body part is forwarded.
        report.add("raw", message.body['plain'][0], sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        self.send_message(report)
        mailbox.mark_seen(uid)
        self.logger.info("Email report read")
def process(self):
    """Download the newest matching file from an FTPS server and emit reports.

    Fixes: several log statements referenced non-existent ``ftp_*``
    parameters (``ftp_host``, ``ftp_port``, ``ftp_directory``,
    ``ftp_file``) although this bot is configured via ``ftps_*`` -- each
    of those lines raised AttributeError at runtime. Also fixed the
    "to to" typo in the filemask log message.
    """
    self.logger.info("Downloading report from %s."
                     % (self.parameters.ftps_host + ':' +
                        str(self.parameters.ftps_port)))
    ftps = FTPS()
    ftps.connect(host=self.parameters.ftps_host,
                 port=self.parameters.ftps_port)
    # Authenticate only when credentials are configured.
    if hasattr(self.parameters, 'ftps_username') \
            and hasattr(self.parameters, 'ftps_password'):
        ftps.login(user=self.parameters.ftps_username,
                   passwd=self.parameters.ftps_password)
    # Switch the data channel to TLS.
    ftps.prot_p()
    cwd = '/'
    if hasattr(self.parameters, 'ftps_directory'):
        self.logger.debug('Changing working directory to: %r.'
                          '' % self.parameters.ftps_directory)
        cwd = self.parameters.ftps_directory
    ftps.cwd(cwd)
    filemask = '*'
    if hasattr(self.parameters, 'ftps_file'):
        self.logger.debug('Setting filemask to: %r.'
                          '' % self.parameters.ftps_file)
        filemask = self.parameters.ftps_file
    mem = io.BytesIO()
    files = fnmatch.filter(ftps.nlst(), filemask)
    if files:
        # Only the latest (last listed) matching file is retrieved.
        self.logger.info('Retrieving file: ' + files[-1])
        ftps.retrbinary("RETR " + files[-1], mem.write)
    else:
        self.logger.error("No file found, terminating download")
        return
    self.logger.info("Report downloaded.")
    raw_reports = []
    try:
        zfp = zipfile.ZipFile(mem, "r")
    except zipfile.BadZipfile:
        # Not a zip archive: forward the raw payload as-is.
        raw_reports.append(mem.getvalue())
    else:
        self.logger.info('Downloaded zip file, extracting following files: %r'
                         '' % zfp.namelist())
        for filename in zfp.namelist():
            raw_reports.append(zfp.read(filename))
    for raw_report in raw_reports:
        report = Report()
        report.add("raw", raw_report, sanitize=True)
        report.add("feed.url", 'ftps://' + self.parameters.ftps_host + ':' +
                   str(self.parameters.ftps_port), sanitize=True)
        self.send_message(report)
def process(self):
    """Fetch every OTX pulse through the API and forward them as one report."""
    self.logger.info("Downloading report through API")
    # Honour an optional HTTPS proxy setting for the OTX client.
    proxy = getattr(self.parameters, 'http_ssl_proxy', None)
    client = OTXv2(self.parameters.api_key, proxy=proxy)
    all_pulses = client.getall()
    self.logger.info("Report downloaded.")
    out = Report()
    out.add("raw", json.dumps(all_pulses))
    self.send_message(out)
def on_message(self, headers, message):
    """Wrap a received STOMP frame into a report and forward it."""
    bot = self.n6stomper
    bot.logger.debug('Receive message '
                     '{!r}...'.format(message[:500]))
    # Build the stomp:// feed URL from the bot's connection parameters.
    feed_url = ("stomp://" + bot.parameters.server + ":" +
                str(bot.parameters.port) + "/" + bot.parameters.exchange)
    report = Report()
    report.add("raw", message.rstrip())
    report.add("feed.url", feed_url)
    bot.send_message(report)
def on_receive(self, data):
    """Split incoming bytes into lines and emit each non-empty line."""
    for raw_line in data.decode().splitlines():
        stripped = raw_line.strip()
        if not stripped:
            continue
        report = Report()
        report.add("raw", str(stripped))
        self.send_message(report)
def test_generate_reports_no_chunking(self):
    """Test generate_reports with chunking disabled"""
    template = Report(harmonization=HARM)
    template.add("feed.name", "test_generate_reports_no_chunking")
    reports = list(generate_reports(template, io.BytesIO(csv_test_data),
                                    chunk_size=None,
                                    copy_header_line=False))
    # Without chunking exactly one report must be produced.
    self.assertEqual(len(reports), 1)
    only = reports[0]
    self.assertEqual(only["feed.name"], "test_generate_reports_no_chunking")
    # The raw field carries the complete, unsplit input data.
    self.assertEqual(base64.b64decode(only["raw"]), csv_test_data)
def on_receive(self, data):
    """Emit each non-empty decoded line as a report tagged with the feed URL."""
    for raw_line in data.decode().splitlines():
        entry = raw_line.strip()
        if not entry:
            continue
        report = Report()
        report.add("raw", entry)
        report.add("feed.url", self.parameters.http_url)
        self.send_message(report)
def process(self):
    """Collect tagged MISP events, emit them as one report, then swap tags."""
    # Query MISP for events carrying the 'to be processed' tag.
    misp_result = self.misp.search(
        tags=self.parameters.misp_tag_to_process)

    if 'response' not in misp_result:
        return

    # Extract the event details from the response.
    misp_events = [entry['Event'] for entry in misp_result['response']]

    # Hand all events to the parser in a single report.
    report = Report()
    report.add('raw', json.dumps(misp_events, sort_keys=True))
    report.add('feed.name', self.parameters.feed)
    report.add('feed.url', self.parameters.misp_url)
    report.add('feed.accuracy', self.parameters.accuracy)
    self.send_message(report)

    # Finally, update the tags on the MISP events.
    # Note PyMISP does not currently support this so we use
    # the API URLs directly with the requests module.
    session = requests.Session()
    session.headers.update({
        'Authorization': self.misp.key,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    })
    for event in misp_events:
        # First drop the 'to be processed' tag, then add 'processed'.
        for url, tag in ((self.misp_del_tag_url,
                          self.parameters.misp_tag_to_process),
                         (self.misp_add_tag_url,
                          self.parameters.misp_tag_processed)):
            payload = {'request': {'Event': {'tag': tag,
                                             'id': event['id']}}}
            session.post(url, data=json.dumps(payload))
def process(self):
    """Download all OTX pulses and emit them with feed metadata."""
    self.logger.info("Downloading report through API")
    client = OTXv2(self.parameters.api_key)
    pulses = client.getall()
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", json.dumps(pulses), sanitize=True)
    report.add("feed.name", self.parameters.feed, sanitize=True)
    report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
    # Stamp the report with the current observation time.
    report.add('time.observation', DateTime().generate_datetime_now(),
               sanitize=True)
    self.send_message(report)
def on_receive(self, data):
    """Emit each non-empty decoded line with full feed metadata."""
    for raw_line in data.decode().splitlines():
        entry = raw_line.strip()
        if not entry:
            continue
        report = Report()
        report.add("raw", entry)
        report.add("feed.name", self.parameters.feed)
        report.add("feed.accuracy", self.parameters.accuracy)
        report.add("feed.url", self.parameters.http_url)
        self.send_message(report)
def process(self):
    """Emit matching mail attachments (optionally unzipped) as reports."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.mail_folder, unread=True)
    if not unread:
        return
    for uid, message in unread:
        if (self.parameters.mail_subject_regex and
                not re.search(self.parameters.mail_subject_regex,
                              message.subject)):
            continue
        self.logger.info("Reading email report")
        for attach in message.attachments:
            if not attach:
                continue
            # Strip the surrounding quote marks from the filename.
            attach_name = attach['filename'][1:-1]
            if not re.search(self.parameters.mail_attach_regex, attach_name):
                continue
            if self.parameters.mail_attach_unzip:
                # Unpack the first member of the zip archive.
                zipped = zipfile.ZipFile(attach['content'])
                raw_report = zipped.read(zipped.namelist()[0])
            else:
                raw_report = attach['content'].read()
            report = Report()
            report.add("raw", raw_report, sanitize=True)
            report.add("feed.name", self.parameters.feed, sanitize=True)
            report.add("feed.accuracy", self.parameters.accuracy,
                       sanitize=True)
            report.add('time.observation',
                       DateTime().generate_datetime_now(), sanitize=True)
            self.send_message(report)
            # Mark as seen only after the report has been sent.
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self):
    """Download a report from the configured URL and forward it."""
    self.logger.info("Downloading report from %s" % self.parameters.url)
    # Fetch through the configured proxies; 60 s timeout, 16 KiB chunks.
    raw_report = fetch_url(self.parameters.url,
                           timeout=60.0,
                           chunk_size=16384,
                           http_proxy=self.parameters.http_proxy,
                           https_proxy=self.parameters.https_proxy)
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", raw_report, sanitize=True)
    self.send_message(report)
def on_message(self, headers, message):
    """Turn a received STOMP frame into a sanitized report and forward it."""
    bot = self.n6stomper
    bot.logger.debug('Receive message '
                     '{!r}...'.format(message[:500]))
    # Build the stomp:// feed URL from the bot's connection parameters.
    feed_url = ("stomp://" + bot.parameters.server + ":" +
                str(bot.parameters.port) + "/" + bot.parameters.exchange)
    report = Report()
    report.add("raw", message.rstrip(), sanitize=True)
    report.add("feed.name", bot.parameters.feed, sanitize=True)
    report.add("feed.url", feed_url, sanitize=True)
    report.add('time.observation', DateTime().generate_datetime_now(),
               sanitize=True)
    bot.send_message(report)
    bot.logger.debug('Receiving Message.')
def generate_reports(report_template: Report, infile: BinaryIO,
                     chunk_size: Optional[int],
                     copy_header_line: bool) -> Generator[Report, None, None]:
    """Yield copies of report_template filled with (chunks of) infile's data.

    If chunk_size is None a single report carrying the whole remaining
    contents of infile is yielded. Otherwise the data is split at newline
    characters into chunks of at most chunk_size bytes (see
    read_delimited_chunks) and one report is yielded per chunk.

    If copy_header_line is true the first line of infile is read before
    chunking and prepended to every chunk -- particularly useful when
    splitting CSV files.

    infile only needs two bytes-returning methods: read, and readline
    (the latter called once and only when copy_header_line is true).

    Params:
        report_template: report used as template for all yielded copies
        infile: stream to read from
        chunk_size: maximum size of each chunk
        copy_header_line: copy the first line of the infile to each chunk

    Yields:
        report: a Report object holding the chunk in the raw field
    """
    if chunk_size is None:
        single = report_template.copy()
        contents = infile.read()
        # Only attach raw data when there actually is any.
        if contents:
            single.add("raw", contents, overwrite=True)
        yield single
        return
    header = infile.readline() if copy_header_line else b""
    for chunk in read_delimited_chunks(infile, chunk_size):
        per_chunk = report_template.copy()
        per_chunk.add("raw", header + chunk, overwrite=True)
        yield per_chunk
def process(self):
    """Collect tagged MISP events into one report, then retag them via the API."""
    # Ask MISP for every event carrying the 'to be processed' tag.
    result = self.misp.search(
        tags=self.parameters.misp_tag_to_process
    )
    if 'response' not in result:
        return
    # Pull the event details out of the response wrapper.
    events = [item['Event'] for item in result['response']]

    # Deliver all events to the parser in a single JSON report.
    report = Report()
    report.add('raw', json.dumps(events, sort_keys=True))
    report.add('feed.name', self.parameters.feed)
    report.add('feed.url', self.parameters.misp_url)
    report.add('feed.accuracy', self.parameters.accuracy)
    self.send_message(report)

    # Finally, update the tags on the MISP events.
    # Note PyMISP does not currently support this so we use
    # the API URLs directly with the requests module.
    session = requests.Session()
    session.headers.update({
        'Authorization': self.misp.key,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    })
    for event in events:
        # Drop the 'to be processed' tag, then add the 'processed' one.
        for url, tag in ((self.misp_del_tag_url,
                          self.parameters.misp_tag_to_process),
                         (self.misp_add_tag_url,
                          self.parameters.misp_tag_processed)):
            body = {'request': {'Event': {'tag': tag, 'id': event['id']}}}
            session.post(url, data=json.dumps(body))
def on_receive(self, data):
    """Emit each non-empty line of the received data as a sanitized report."""
    for chunk in data.split("\n"):
        entry = chunk.strip()
        if entry == "":
            continue
        report = Report()
        report.add("raw", str(entry), sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        # Record when this line was observed.
        report.add("time.observation", DateTime().generate_datetime_now(),
                   sanitize=True)
        self.send_message(report)
def on_message(self, headers, message):
    """Forward a received STOMP frame as a sanitized report.

    Fixes: the port was concatenated into the feed URL without ``str()``,
    which raises TypeError when the port is configured as an integer
    (the sibling handlers all wrap it in ``str()``); and the final debug
    call used ``self.logger`` although this listener only reaches the
    bot's logger via ``self.n6stomper`` -- again matching the sibling
    handlers.
    """
    self.n6stomper.logger.debug('Receive message '
                                '{!r}...'.format(message[:500]))
    report = Report()
    report.add("raw", message.rstrip(), sanitize=True)
    report.add("feed.name", self.n6stomper.parameters.feed, sanitize=True)
    report.add("feed.url", "stomp://" +
               self.n6stomper.parameters.server +
               ":" + str(self.n6stomper.parameters.port) +
               "/" + self.n6stomper.parameters.exchange, sanitize=True)
    time_observation = DateTime().generate_datetime_now()
    report.add('time.observation', time_observation, sanitize=True)
    self.n6stomper.send_message(report)
    self.n6stomper.logger.debug('Receiving Message.')
def process(self):
    """Stream an HTTP endpoint line by line, emitting one report per line."""
    try:
        response = requests.get(self.parameters.url, stream=True)
    except requests.exceptions.ConnectionError:
        raise ValueError('Connection Failed.')
    for raw_line in response.iter_lines():
        stripped = raw_line.strip() if self.parameters.strip_lines else raw_line
        # filter out keep-alive new lines and empty lines
        if not stripped:
            continue
        report = Report()
        report.add("raw", decode(stripped))
        self.send_message(report)
    self.logger.info('Stream stopped.')
def process(self):
    """Query the Blueliv free API for online crime servers and emit them."""
    self.logger.info("Downloading report through API")
    http_proxy = getattr(self.parameters, 'http_proxy', None)
    https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
    # Only pass a proxy mapping when both schemes are configured.
    proxies = ({'http': http_proxy, 'https': https_proxy}
               if http_proxy and https_proxy else None)
    api = BluelivAPI(base_url='https://freeapi.blueliv.com',
                     token=self.parameters.api_key,
                     log_level=logging.INFO,
                     proxy=proxies)
    response = api.crime_servers.online()
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", json.dumps(list(response.items)))
    self.send_message(report)
def process(self):
    """Download all OTX pulses and emit them with feed name and accuracy."""
    self.logger.info("Downloading report through API")
    client = OTXv2(self.parameters.api_key)
    all_pulses = client.getall()
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", json.dumps(all_pulses))
    report.add("feed.name", self.parameters.feed)
    report.add("feed.accuracy", self.parameters.accuracy)
    self.send_message(report)
def on_receive(self, data):
    """Emit every non-blank line of the received data as a report."""
    for piece in data.split('\n'):
        entry = piece.strip()
        if entry == "":
            continue
        report = Report()
        report.add("raw", str(entry))
        report.add("feed.name", self.parameters.feed)
        report.add("feed.accuracy", self.parameters.accuracy)
        self.send_message(report)
def process(self):
    """Fetch online crime servers from the Blueliv free API and emit them."""
    self.logger.debug("Downloading report through API.")
    http_proxy = getattr(self.parameters, 'http_proxy', None)
    https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
    # Use a proxy mapping only when both schemes are configured.
    proxies = ({'http': http_proxy, 'https': https_proxy}
               if http_proxy and https_proxy else None)
    api = BluelivAPI(base_url='https://freeapi.blueliv.com',
                     token=self.parameters.api_key,
                     log_level=logging.INFO,
                     proxy=proxies)
    response = api.crime_servers.online()
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", json.dumps(list(response.items)))
    self.send_message(report)
def on_message(self, headers, message):
    """Forward a received STOMP frame as a report with feed metadata."""
    bot = self.n6stomper
    bot.logger.debug('Receive message '
                     '{!r}...'.format(message[:500]))
    report = Report()
    report.add("raw", message.rstrip())
    report.add("feed.name", bot.parameters.feed)
    # Compose the stomp:// feed URL from the connection parameters.
    report.add("feed.url", "stomp://" + bot.parameters.server + ":" +
               str(bot.parameters.port) + "/" + bot.parameters.exchange)
    bot.send_message(report)
    bot.logger.debug('Receiving Message.')
def process(self):
    """Send matching mail attachments as reports and mark those mails read."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.folder, unread=True)
    if unread:
        for uid, message in unread:
            if (self.parameters.subject_regex and
                    not re.search(self.parameters.subject_regex,
                                  message.subject)):
                continue
            for attach in message.attachments:
                if not attach:
                    continue
                # remove quote marks from filename
                attach_name = attach['filename'][1:-1]
                if not re.search(self.parameters.attach_regex, attach_name):
                    continue
                if self.parameters.attach_unzip:
                    # Forward the first member of the zip archive.
                    zipped = zipfile.ZipFile(attach['content'])
                    raw_report = zipped.read(zipped.namelist()[0])
                else:
                    raw_report = attach['content'].read()
                report = Report()
                report.add("raw", raw_report)
                self.send_message(report)
                # Only mark read if message relevant to this instance,
                # so other instances watching this mailbox will still
                # check it.
                mailbox.mark_seen(uid)
                self.logger.debug("Email report read.")
    mailbox.logout()
def test_generate_reports_with_chunking_no_header(self):
    """Test generate_reports with chunking and not copying the header"""
    template = Report(harmonization=HARM)
    template.add("feed.name", "test_generate_reports_with_chunking")
    chunk_size = 1000
    # The test is only meaningful when the data exceeds the chunk size.
    self.assertTrue(chunk_size < len(csv_test_data))
    reassembled = b""
    for report in generate_reports(template, io.BytesIO(csv_test_data),
                                   chunk_size=chunk_size,
                                   copy_header_line=False):
        # Template fields must be carried over into every chunk report.
        self.assertEqual(report["feed.name"],
                         "test_generate_reports_with_chunking")
        reassembled += base64.b64decode(report["raw"])
    # All chunks together must reproduce the original data.
    self.assertEqual(reassembled, csv_test_data)
def process(self):
    """Read unread mails and forward matching attachments as reports."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.mail_folder, unread=True)
    if not unread:
        return
    for uid, message in unread:
        # Only handle mails whose subject matches the configured regex.
        if (self.parameters.mail_subject_regex and
                not re.search(self.parameters.mail_subject_regex,
                              message.subject)):
            continue
        self.logger.info("Reading email report")
        for attach in message.attachments:
            if not attach:
                continue
            # Drop the quote marks surrounding the attachment filename.
            attach_name = attach['filename'][1:-1]
            if not re.search(self.parameters.mail_attach_regex, attach_name):
                continue
            if self.parameters.mail_attach_unzip:
                zipped = zipfile.ZipFile(attach['content'])
                raw_report = zipped.read(zipped.namelist()[0])
            else:
                raw_report = attach['content'].read()
            report = Report()
            report.add("raw", raw_report, sanitize=True)
            report.add("feed.name", self.parameters.feed, sanitize=True)
            report.add("feed.accuracy", self.parameters.accuracy,
                       sanitize=True)
            report.add('time.observation',
                       DateTime().generate_datetime_now(), sanitize=True)
            self.send_message(report)
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self):
    """Download reports from URLs found in unread mail bodies via requests."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.mail_folder, unread=True)
    if not unread:
        return
    for uid, message in unread:
        if (self.parameters.mail_subject_regex and
                not re.search(self.parameters.mail_subject_regex,
                              message.subject)):
            continue
        self.logger.info("Reading email report")
        for body in message.body['plain']:
            match = re.search(self.parameters.mail_url_regex, body)
            if not match:
                continue
            url = match.group()
            self.logger.info("Downloading report from %s" % url)
            resp = requests.get(url=url)
            # Any non-2xx response aborts processing.
            if resp.status_code // 100 != 2:
                raise ValueError('HTTP response status code was {}.'
                                 ''.format(resp.status_code))
            self.logger.info("Report downloaded.")
            report = Report()
            report.add("raw", resp.content, sanitize=True)
            report.add("feed.name", self.parameters.feed, sanitize=True)
            report.add("feed.accuracy", self.parameters.accuracy,
                       sanitize=True)
            report.add('time.observation',
                       DateTime().generate_datetime_now(), sanitize=True)
            self.send_message(report)
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self):
    """Fetch reports from URLs in unread mail bodies using fetch_url."""
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    unread = mailbox.messages(folder=self.parameters.mail_folder, unread=True)
    if not unread:
        return
    for uid, message in unread:
        if (self.parameters.mail_subject_regex and
                not re.search(self.parameters.mail_subject_regex,
                              message.subject)):
            continue
        self.logger.info("Reading email report")
        for body in message.body['plain']:
            match = re.search(self.parameters.mail_url_regex, body)
            if not match:
                continue
            url = match.group()
            self.logger.info("Downloading report from %s" % url)
            # 60 s timeout, 16 KiB download chunks.
            raw_report = fetch_url(url, timeout=60.0, chunk_size=16384)
            self.logger.info("Report downloaded.")
            report = Report()
            report.add("raw", raw_report, sanitize=True)
            report.add("feed.name", self.parameters.feed, sanitize=True)
            report.add("feed.accuracy", self.parameters.accuracy,
                       sanitize=True)
            report.add('time.observation',
                       DateTime().generate_datetime_now(), sanitize=True)
            self.send_message(report)
            mailbox.mark_seen(uid)
            self.logger.info("Email report read")
def process(self):
    """Emit a report (with full feed metadata) for each matching file in a directory."""
    self.logger.debug("Started looking for files.")
    if not os.path.isdir(self.parameters.path):
        return
    base_dir = os.path.abspath(self.parameters.path)
    suffix_pattern = '*' + self.parameters.postfix
    for name in os.listdir(base_dir):
        candidate = os.path.join(base_dir, name)
        # Only plain files with the configured postfix are processed.
        if not (os.path.isfile(candidate) and
                fnmatch.fnmatch(name, suffix_pattern)):
            continue
        self.logger.info("Processing file %r." % candidate)
        with open(candidate, 'r') as handle:
            report = Report()
            report.add("raw", handle.read())
            report.add("feed.name", self.parameters.feed)
            report.add("feed.url", "file://localhost%s" % candidate)
            report.add("feed.accuracy", self.parameters.accuracy)
            self.send_message(report)
        if self.parameters.delete_file:
            try:
                os.remove(candidate)
                self.logger.debug("Deleted file: %r." % candidate)
            except PermissionError:
                # Stop rather than risk sending the same file twice.
                self.logger.error("Could not delete file %r." % candidate)
                self.logger.info(
                    "Maybe I don't have sufficient rights on that file?"
                )
                self.logger.error(
                    "Stopping now, to prevent reading this file again."
                )
                self.stop()
def on_receive(self, data):
    """Emit each non-empty decoded line together with feed metadata."""
    for text_line in data.decode().splitlines():
        cleaned = text_line.strip()
        if cleaned == "":
            continue
        report = Report()
        report.add("raw", str(cleaned))
        report.add("feed.name", self.parameters.feed)
        report.add("feed.accuracy", self.parameters.accuracy)
        self.send_message(report)
def process(self):
    """Download a report over HTTP and emit it with feed metadata."""
    self.logger.info("Downloading report from %s" % self.parameters.http_url)
    resp = requests.get(url=self.parameters.http_url, auth=self.auth,
                        proxies=self.proxy, headers=self.http_header,
                        verify=self.http_verify_cert)
    # Anything outside the 2xx range is treated as a failure.
    if resp.status_code // 100 != 2:
        raise ValueError('HTTP response status code was {}.'
                         ''.format(resp.status_code))
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", resp.text, sanitize=True)
    report.add("feed.name", self.parameters.feed, sanitize=True)
    report.add("feed.url", self.parameters.http_url, sanitize=True)
    # Stamp the report with the current observation time.
    report.add('time.observation', DateTime().generate_datetime_now(),
               sanitize=True)
    self.send_message(report)
def log_message(self, msg):
    """Extract the body of an XMPP stanza and forward it as a report."""
    if self.parameters.pass_full_xml:
        body = str(msg)
    elif self.parameters.strip_message:
        body = msg['body'].strip()
    else:
        body = msg['body']
    # Truncate long bodies for the debug log only.
    tmp_body = body[:397] + '...' if len(body) > 400 else body
    self.logger.debug("Received Stanza: %r from %r", tmp_body, msg['from'])
    raw_msg = body
    # Read msg-body and add as raw to a new report.
    # now it's up to a parser to do the interpretation of the message.
    if raw_msg:
        report = Report()
        report.add("raw", raw_msg)
        self.send_message(report)
def log_message(self, msg):
    """Pull the body out of an XMPP stanza and emit it as a raw report."""
    if self.parameters.pass_full_xml:
        body = str(msg)
    elif self.parameters.strip_message:
        body = msg['body'].strip()
    else:
        body = msg['body']
    # Shorten the body for logging purposes only.
    tmp_body = body[:397] + '...' if len(body) > 400 else body
    self.logger.debug("Received Stanza: %r from %r." % (tmp_body,
                                                        msg['from']))
    raw_msg = body
    # Read msg-body and add as raw to a new report.
    # now it's up to a parser to do the interpretation of the message.
    if raw_msg:
        report = Report()
        report.add("raw", raw_msg)
        self.send_message(report)
def process(self):
    """Read unread mails, extract matching attachments and emit them as
    raw reports; matching mails are marked seen afterwards.
    """
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    emails = mailbox.messages(folder=self.parameters.folder, unread=True)
    if emails:
        for uid, message in emails:
            if (self.parameters.subject_regex and not re.search(
                    self.parameters.subject_regex, message.subject)):
                continue
            self.logger.info("Reading email report")
            for attach in message.attachments:
                if not attach:
                    continue
                # Idiom fix: strip the surrounding quote marks with a
                # plain [1:-1] slice instead of len() arithmetic.
                attach_name = attach['filename'][1:-1]
                if re.search(self.parameters.attach_regex, attach_name):
                    if self.parameters.attach_unzip:
                        zipped = zipfile.ZipFile(attach['content'])
                        raw_report = zipped.read(zipped.namelist()[0])
                    else:
                        raw_report = attach['content'].read()
                    report = Report()
                    report.add("raw", raw_report)
                    report.add("feed.name", self.parameters.feed)
                    report.add("feed.accuracy", self.parameters.accuracy)
                    self.send_message(report)
                    # Only mark read if message relevant to this instance,
                    # so other instances watching this mailbox will still
                    # check it.
                    mailbox.mark_seen(uid)
                    self.logger.info("Email report read")
    mailbox.logout()
def process(self):
    """Fetch the Blueliv online crime-server feed and emit it as raw JSON."""
    self.logger.info("Downloading report through API")
    http_proxy = getattr(self.parameters, 'http_proxy', None)
    https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
    # Only configure a proxy when both schemes are provided.
    proxy = ({'http': http_proxy, 'https': https_proxy}
             if http_proxy and https_proxy else None)
    api = BluelivAPI(base_url='https://freeapi.blueliv.com',
                     token=self.parameters.api_key,
                     log_level=logging.INFO,
                     proxy=proxy)
    response = api.crime_servers.online()
    self.logger.info("Report downloaded.")
    report = Report()
    report.add("raw", json.dumps(list(response.items)), sanitize=True)
    report.add("feed.name", self.parameters.feed, sanitize=True)
    report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
    time_observation = DateTime().generate_datetime_now()
    report.add('time.observation', time_observation, sanitize=True)
    self.send_message(report)
def process(self):
    """Read unread mails, download the URL found in each plain-text body
    and emit the response as a raw report; matching mails are marked seen.
    """
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    messages = mailbox.messages(folder=self.parameters.mail_folder,
                                unread=True)
    if messages:
        for uid, message in messages:
            if (self.parameters.mail_subject_regex
                    and not re.search(self.parameters.mail_subject_regex,
                                      message.subject)):
                continue
            self.logger.info("Reading email report")
            for body in message.body['plain']:
                match = re.search(self.parameters.mail_url_regex, body)
                if not match:
                    continue
                url = match.group()
                self.logger.info("Downloading report from %s" % url)
                resp = requests.get(url=url)
                if resp.status_code // 100 != 2:
                    raise ValueError('HTTP response status code was {}.'
                                     ''.format(resp.status_code))
                self.logger.info("Report downloaded.")
                report = Report()
                report.add("raw", resp.content)
                report.add("feed.name", self.parameters.feed)
                report.add("feed.accuracy", self.parameters.accuracy)
                self.send_message(report)
                mailbox.mark_seen(uid)
                self.logger.info("Email report read")
def process(self):
    """Fetch reports from RT tickets, via attachment or embedded URL.

    Searches the configured queue; for each ticket, either a matching
    attachment (optionally unzipped) or the first URL found in the ticket
    history is turned into a report. Optionally takes the ticket and sets
    its status afterwards.

    Raises:
        ValueError: when the RT login fails.
    """
    RT = rt.Rt(self.parameters.uri, self.parameters.user,
               self.parameters.password)
    if not RT.login():
        raise ValueError('Login failed.')
    query = RT.search(Queue=self.parameters.search_queue,
                      Subject__like=self.parameters.search_subject_like,
                      Owner=self.parameters.search_owner,
                      Status=self.parameters.search_status)
    self.logger.info('{} results on search query.'.format(len(query)))
    for ticket in query:
        ticket_id = int(ticket['id'].split('/')[1])
        self.logger.debug('Process ticket {}.'.format(ticket_id))
        content = 'attachment'
        for (att_id, att_name, _, _) in RT.get_attachments(ticket_id):
            if re.search(self.parameters.attachment_regex, att_name):
                self.logger.debug('Found attachment {}: {!r}.'
                                  ''.format(att_id, att_name))
                break
        else:
            # No attachment matched; fall back to a URL in the history.
            text = RT.get_history(ticket_id)[0]['Content']
            urlmatch = re.search(self.parameters.url_regex, text)
            if urlmatch:
                content = 'url'
                url = urlmatch.group(0)
            else:
                self.logger.debug('No matching attachment or URL found.')
                continue
        if content == 'attachment':
            attachment = RT.get_attachment_content(ticket_id, att_id)
            if self.parameters.unzip_attachment:
                file_obj = io.BytesIO(attachment)
                zipped = zipfile.ZipFile(file_obj)
                raw = zipped.read(zipped.namelist()[0])
            else:
                raw = attachment
        else:
            resp = requests.get(url=url, proxies=self.proxy,
                                headers=self.http_header,
                                verify=self.http_verify_cert)
            if resp.status_code // 100 != 2:
                # Bug fix: previously the error was only logged and the
                # error page was still forwarded as a report (and the
                # ticket taken/edited); skip this ticket instead.
                self.logger.error('HTTP response status code was {}.'
                                  ''.format(resp.status_code))
                continue
            self.logger.info("Report downloaded.")
            raw = resp.text
        report = Report()
        report.add("raw", raw, sanitize=True)
        report.add("rtir_id", ticket_id, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        self.send_message(report)
        if self.parameters.take_ticket:
            try:
                RT.take(ticket_id)
            except rt.BadRequest:
                self.logger.exception("Could not take ticket %s."
                                      % ticket_id)
        if self.parameters.set_status:
            RT.edit_ticket(ticket_id, status=self.parameters.set_status)
def process(self):
    """Collect reports from unread mails: matching attachments first, and
    if a mail has none, a URL found in its plain-text body.

    Processed mails are marked seen so they are not collected again.
    """
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          utils.base64_decode(self.parameters.mail_password),
                          self.parameters.mail_ssl)
    self.logger.info("Connected to mail server")
    emails = mailbox.messages(folder=self.parameters.folder, unread=True)
    try:
        if emails:
            self.logger.info("Parsing emails in mailbox")
            for uid, message in emails:
                if (self.parameters.subject_regex and
                        not re.search(self.parameters.subject_regex,
                                      message.subject)):
                    continue
                self.logger.info("Reading email report")
                if hasattr(message, 'attachments') and message.attachments:
                    for attach in message.attachments:
                        if not attach:
                            continue
                        # Remove surrounding quote marks from the filename.
                        attach_name = attach['filename'][1:-1]
                        if re.search(self.parameters.attach_regex,
                                     attach_name):
                            self.logger.info("Parsing attachment")
                            if self.parameters.attach_unzip:
                                zipped = zipfile.ZipFile(attach['content'])
                                raw_report = zipped.read(
                                    zipped.namelist()[0])
                            else:
                                raw_report = attach['content'].read()
                            self.logger.info('content read')
                            report = Report()
                            report.add("raw", raw_report, sanitize=True)
                            report.add("feed.name", self.parameters.feed,
                                       sanitize=True)
                            report.add("feed.accuracy",
                                       self.parameters.accuracy,
                                       sanitize=True)
                            report.add('feed.reportname', message.subject,
                                       sanitize=True)
                            self.send_message(report)
                            mailbox.mark_seen(uid)
                            self.logger.info("Email report read")
                else:
                    # No attachment: look for a report URL in the body.
                    self.logger.info("No attachment found, trying collecting from URL")
                    for body in message.body['plain']:
                        self.logger.info("Parsing message body")
                        match = re.search(self.parameters.url_regex, body)
                        if match:
                            url = match.group()
                            self.logger.info("Downloading report from %s" % url)
                            resp = requests.get(url=url)
                            if resp.status_code // 100 != 2:
                                raise ValueError('HTTP response status code was {}.'
                                                 ''.format(resp.status_code))
                            raw_report = resp.content
                            self.logger.info("Report downloaded.")
                            report = Report()
                            report.add("raw", raw_report, sanitize=True)
                            report.add("feed.name", self.parameters.feed,
                                       sanitize=True)
                            report.add("feed.accuracy",
                                       self.parameters.accuracy,
                                       sanitize=True)
                            report.add('feed.reportname', message.subject,
                                       sanitize=True)
                            self.send_message(report)
                            mailbox.mark_seen(uid)
                            self.logger.info("Email report read")
    except Exception:
        # Bug fix: a bare `except:` logged a vague INFO line and silently
        # swallowed every error (even KeyboardInterrupt/SystemExit); catch
        # Exception only and log the full traceback.
        self.logger.exception("Error while processing mail reports.")
def process(self):
    """Collect matching attachments from RT tickets, optionally unzip and
    GPG-decrypt them, and emit each one as a raw report.

    Raises:
        ValueError: when the RT login fails.
    """
    tracker = rt.Rt(self.parameters.uri, self.parameters.user,
                    self.parameters.password)
    if not tracker.login():
        raise ValueError('Login failed.')
    tickets = tracker.search(
        Queue=self.parameters.search_queue,
        Subject__like=self.parameters.search_subject_like,
        Owner=self.parameters.search_owner,
        Status=self.parameters.search_status)
    self.logger.info('{} results on search query.'.format(len(tickets)))
    for ticket in tickets:
        ticket_id = int(ticket['id'].split('/')[1])
        self.logger.debug('Process ticket {}.'.format(ticket_id))
        for (att_id, att_name, _, _) in tracker.get_attachments(ticket_id):
            if re.search(self.parameters.attachment_regex, att_name):
                self.logger.debug('Found attachment {}: {!r}.'
                                  ''.format(att_id, att_name))
                break
        else:
            self.logger.debug('No matching attachement name found.')
            continue
        attachment = tracker.get_attachment_content(ticket_id, att_id)
        if self.parameters.unzip_attachment:
            zipped = zipfile.ZipFile(io.BytesIO(attachment))
            raw = zipped.read(zipped.namelist()[0])
        else:
            raw = attachment
        if self.parameters.gnupg_decrypt:
            decrypted = self.gpg.decrypt(
                raw,
                always_trust=self.parameters.gnupg_trust,
                passphrase=self.parameters.gnupg_passphrase)
            raw = str(decrypted)
            self.logger.info('Successfully decrypted attachment.')
            self.logger.debug(raw)
        report = Report()
        report.add("raw", raw, sanitize=True)
        report.add("rtir_id", ticket_id, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        time_observation = DateTime().generate_datetime_now()
        report.add('time.observation', time_observation, sanitize=True)
        self.send_message(report)
        if self.parameters.take_ticket:
            tracker.edit_ticket(ticket_id, Owner=self.parameters.user)
def process(self):
    """Send a single test report; it carries 'raw' only when configured."""
    test_report = Report()
    if self.parameters.raw:
        test_report['raw'] = 'test'
    self.send_message(test_report)
def process(self):
    """Fetch matching RT ticket attachments (optionally unzipped and
    GPG-decrypted) and forward each as a raw report.

    Raises:
        ValueError: when the RT login fails.
    """
    rt_client = rt.Rt(self.parameters.uri, self.parameters.user,
                      self.parameters.password)
    if not rt_client.login():
        raise ValueError('Login failed.')
    results = rt_client.search(
        Queue=self.parameters.search_queue,
        Subject__like=self.parameters.search_subject_like,
        Owner=self.parameters.search_owner,
        Status=self.parameters.search_status)
    self.logger.info('{} results on search query.'.format(len(results)))
    for ticket in results:
        ticket_id = int(ticket['id'].split('/')[1])
        self.logger.debug('Process ticket {}.'.format(ticket_id))
        # for/else: the else branch runs only when no attachment matched.
        for (att_id, att_name, _, _) in rt_client.get_attachments(ticket_id):
            if re.search(self.parameters.attachment_regex, att_name):
                self.logger.debug('Found attachment {}: {!r}.'
                                  ''.format(att_id, att_name))
                break
        else:
            self.logger.debug('No matching attachement name found.')
            continue
        content = rt_client.get_attachment_content(ticket_id, att_id)
        if self.parameters.unzip_attachment:
            archive = zipfile.ZipFile(io.BytesIO(content))
            raw = archive.read(archive.namelist()[0])
        else:
            raw = content
        if self.parameters.gnupg_decrypt:
            raw = str(self.gpg.decrypt(
                raw,
                always_trust=self.parameters.gnupg_trust,
                passphrase=self.parameters.gnupg_passphrase))
            self.logger.info('Successfully decrypted attachment.')
            self.logger.debug(raw)
        report = Report()
        report.add("raw", raw, sanitize=True)
        report.add("rtir_id", ticket_id, sanitize=True)
        report.add("feed.name", self.parameters.feed, sanitize=True)
        report.add("feed.accuracy", self.parameters.accuracy, sanitize=True)
        report.add('time.observation',
                   DateTime().generate_datetime_now(), sanitize=True)
        self.send_message(report)
        if self.parameters.take_ticket:
            rt_client.edit_ticket(ticket_id, Owner=self.parameters.user)
def process(self):
    """Poll an IMAP folder, download the URL found in each matching mail
    and forward the response as a raw report; relevant mails are marked
    seen, then the mailbox is logged out.
    """
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    messages = mailbox.messages(folder=self.parameters.folder, unread=True)
    if messages:
        for uid, message in messages:
            if (self.parameters.subject_regex
                    and not re.search(self.parameters.subject_regex,
                                      message.subject)):
                continue
            for body in message.body['plain']:
                match = re.search(self.parameters.url_regex, str(body))
                if not match:
                    continue
                # Strip leading/trailing spaces, newlines and carriage
                # returns from the matched URL.
                url = match.group().strip()
                # Assemble the HTTP request configuration from parameters.
                self.http_header = getattr(self.parameters, 'http_header', {})
                self.http_verify_cert = getattr(self.parameters,
                                                'http_verify_cert', True)
                if hasattr(self.parameters, 'http_user') and \
                        hasattr(self.parameters, 'http_password'):
                    self.auth = (self.parameters.http_user,
                                 self.parameters.http_password)
                else:
                    self.auth = None
                http_proxy = getattr(self.parameters, 'http_proxy', None)
                https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
                if http_proxy and https_proxy:
                    self.proxy = {'http': http_proxy, 'https': https_proxy}
                else:
                    self.proxy = None
                self.http_header['User-agent'] = \
                    self.parameters.http_user_agent
                self.logger.info("Downloading report from %r." % url)
                resp = requests.get(url=url, auth=self.auth,
                                    proxies=self.proxy,
                                    headers=self.http_header,
                                    verify=self.http_verify_cert)
                if resp.status_code // 100 != 2:
                    raise ValueError('HTTP response status code was {}.'
                                     ''.format(resp.status_code))
                self.logger.info("Report downloaded.")
                report = Report()
                report.add("raw", resp.content)
                self.send_message(report)
                # Only mark read if message relevant to this instance,
                # so other instances watching this mailbox will still
                # check it.
                mailbox.mark_seen(uid)
                self.logger.info("Email report read.")
    mailbox.logout()
def process(self):
    """Poll an IMAP folder, download the URL found in each matching mail
    and forward the response (with feed metadata) as a raw report.
    """
    mailbox = imbox.Imbox(self.parameters.mail_host,
                          self.parameters.mail_user,
                          self.parameters.mail_password,
                          self.parameters.mail_ssl)
    messages = mailbox.messages(folder=self.parameters.folder, unread=True)
    if messages:
        for uid, message in messages:
            if (self.parameters.subject_regex
                    and not re.search(self.parameters.subject_regex,
                                      message.subject)):
                continue
            self.logger.info("Reading email report")
            for body in message.body['plain']:
                match = re.search(self.parameters.url_regex, str(body))
                if not match:
                    continue
                # Strip leading/trailing spaces, newlines and carriage
                # returns from the matched URL.
                url = match.group().strip()
                # Assemble the HTTP request configuration from parameters.
                self.http_header = getattr(self.parameters, 'http_header', {})
                self.http_verify_cert = getattr(self.parameters,
                                                'http_verify_cert', True)
                if hasattr(self.parameters, 'http_user') and \
                        hasattr(self.parameters, 'http_password'):
                    self.auth = (self.parameters.http_user,
                                 self.parameters.http_password)
                else:
                    self.auth = None
                http_proxy = getattr(self.parameters, 'http_proxy', None)
                https_proxy = getattr(self.parameters, 'http_ssl_proxy', None)
                if http_proxy and https_proxy:
                    self.proxy = {'http': http_proxy, 'https': https_proxy}
                else:
                    self.proxy = None
                self.http_header['User-agent'] = \
                    self.parameters.http_user_agent
                self.logger.info("Downloading report from %s" % url)
                resp = requests.get(url=url, auth=self.auth,
                                    proxies=self.proxy,
                                    headers=self.http_header,
                                    verify=self.http_verify_cert)
                if resp.status_code // 100 != 2:
                    raise ValueError('HTTP response status code was {}.'
                                     ''.format(resp.status_code))
                self.logger.info("Report downloaded.")
                report = Report()
                report.add("raw", resp.content)
                report.add("feed.name", self.parameters.feed)
                report.add("feed.accuracy", self.parameters.accuracy)
                self.send_message(report)
                # Only mark read if message relevant to this instance,
                # so other instances watching this mailbox will still
                # check it.
                mailbox.mark_seen(uid)
                self.logger.info("Email report read")
def process(self):
    """Download a report over HTTP(S) and forward it with feed metadata.

    Raises:
        ValueError: when the server does not answer with a 2xx status.
    """
    self.logger.info("Downloading report from %s" % self.parameters.http_url)
    response = requests.get(url=self.parameters.http_url,
                            auth=self.auth,
                            proxies=self.proxy,
                            headers=self.http_header,
                            verify=self.http_verify_cert)
    if response.status_code // 100 != 2:
        raise ValueError('HTTP response status code was {}.'
                         ''.format(response.status_code))
    self.logger.info("Report downloaded.")
    report = Report()
    # Attach the payload and feed metadata, all sanitized.
    for key, value in (("raw", response.text),
                       ("feed.name", self.parameters.feed),
                       ("feed.url", self.parameters.http_url),
                       ("feed.accuracy", self.parameters.accuracy)):
        report.add(key, value, sanitize=True)
    report.add('time.observation', DateTime().generate_datetime_now(),
               sanitize=True)
    self.send_message(report)