Example #1
def verify_node_test():
    disco = Discoverer()
    disco.get_list_of_nodes()
    n1 = disco.nodes_list[0]
    ret = disco.verify_node(n1)
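As given, the test exercises the calls but never checks the result; a minimal finished version, assuming verify_node() returns a truthy value for a reachable node:

def verify_node_test():
    disco = Discoverer()
    disco.get_list_of_nodes()
    # Assumption: verify_node() returns a truthy value on success.
    assert disco.verify_node(disco.nodes_list[0])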
Example #2
 def run_discoverer(self, queue):
     # Keep only the collectors that have discovery enabled
     collectors = [collector for collector in self.collectors if collector.discovery]
     discoverer_config = self.config.get("discoverer")
     discoverer_config["log_format"] = self.log_format
     discoverer = Discoverer(
         queue=queue, shutdown=self.shutdown_event, config=discoverer_config, collectors=collectors
     )
     self.processes.append(discoverer)
     discoverer.start()
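The runner presumably tears these workers down elsewhere; a minimal teardown sketch, assuming Discoverer subclasses multiprocessing.Process and exits once the shutdown event is set (the method name is hypothetical):

def stop_processes(self):
    # Signal every worker to exit, then wait for a clean shutdown.
    self.shutdown_event.set()
    for process in self.processes:
        process.join(timeout=10)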
Example #3
 def __init__(self, config=None):
     self.config_mdb_atlas = config.get('mdb_atlas')
     self.config_email = config.get('email')
     self.config_sms = config.get('sms')
     self._discoverer = Discoverer(None, None)
     self._nmapper = Nmapper()
     self._alerter = Alerter()
     self._scan_db = MongoDBClient(
         self.config_mdb_atlas.get('scan_cluster_name'),
         self.config_mdb_atlas.get('scan_username'),
         self.config_mdb_atlas.get('scan_password'),
         self.config_mdb_atlas.get('scan_db'))
     self._running_event = None
     self._previous_scan = {'hosts_list': [], 'hosts_ports_scan': []}
     self._email_client = EmailClient(
         self.config_email.get('email_from_addr'),
         self.config_email.get('email_password'),
         self.config_email.get('email_smtp_server'),
         self.config_email.get('email_smtp_server_port'))
     self._sms_client = SmsClient(self.config_sms.get('sms_account_sid'),
                                  self.config_sms.get('sms_auth_token'))
Example #4
 def SetCurrentFile(self, filename):
     self.current_file_uri = 'file://' + os.path.abspath(filename)
     self.pipeline.set_property('uri', self.current_file_uri)
     discoverer = Discoverer(os.path.abspath(filename))
     discoverer.connect('discovered', self._OnDiscovered)
     self.discovery_start_time = time.time()
     discoverer.discover()
     self.ready_to_play = False  # will be ready once the discoverer has finished
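This example uses the old gst-python 0.10 Discoverer. For comparison, a minimal sketch of the same probe against the GStreamer 1.0 GstPbutils API (the media path is hypothetical):

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstPbutils', '1.0')
from gi.repository import Gst, GstPbutils

Gst.init(None)
# Synchronous discovery with a 5-second timeout.
discoverer = GstPbutils.Discoverer.new(5 * Gst.SECOND)
info = discoverer.discover_uri('file:///path/to/media.ogv')
print(info.get_duration() / Gst.SECOND)  # duration in seconds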
Example #5
    def create_thumbnails(self):
        _log.debug("Getting Thumbnails for %s" % self.filepath)

        if not os.path.exists(self.filepath):
            _log.debug("File not found: %s" % self.filepath)
            return False

        if self.fileinfo is None:
            self.fileinfo = Discoverer(self.filepath)
            self.fileinfo.do_discovery()

        if (self.fileinfo is None) or (self.fileinfo.videolength <= 0) or not self.fileinfo.is_video:
            _log.debug("Skipping thumbnail creation. No video stream found for file: %s." % self.filepath)
            return False
 
        offset = counter = 0 
        caps = "video/x-raw-rgb,format=RGB,width=%s,height=%s,pixel-aspect-ratio=1/1" % (self.width, self.height)
        cmd = "uridecodebin uri=file://%s  ! ffmpegcolorspace ! videorate ! videoscale ! " \
                "ffmpegcolorspace ! appsink name=sink caps=%s" % \
                (os.path.abspath(self.filepath), caps)

        pipeline = gst.parse_launch(cmd)
        appsink = pipeline.get_by_name("sink")
        appsink.set_property('emit-signals', True)
        # Set sync off to make decoding faster
        appsink.set_property('sync', False)
        appsink.connect('new-preroll', self.on_new_preroll_cb)
        pipeline.set_state(gst.STATE_PAUSED)
        pipeline.get_state()
    
        if self.interval is None:
            self.interval = ((self.fileinfo.videolength/gst.SECOND) / self.count) or 1
    
        while ((offset < self.fileinfo.videolength/gst.SECOND) and (counter < self.count)):
            ret = pipeline.seek_simple( 
                gst.FORMAT_TIME, gst.SEEK_FLAG_ACCURATE | gst.SEEK_FLAG_FLUSH, offset * gst.SECOND)
            pipeline.get_state()
            offset += self.interval
            counter += 1
        return True
Example #6
class AutoscanApplication(object):
    """
    This is the Alerting Tool business logic class. It applies the
    main business logic for generating the scans, the alerts, and
    the notifications, and orchestrates the interaction between the
    components of the application.
    """
    def __init__(self, config=None):
        self.config_mdb_atlas = config.get('mdb_atlas')
        self.config_email = config.get('email')
        self.config_sms = config.get('sms')
        self._discoverer = Discoverer(None, None)
        self._nmapper = Nmapper()
        self._alerter = Alerter()
        self._scan_db = MongoDBClient(
            self.config_mdb_atlas.get('scan_cluster_name'),
            self.config_mdb_atlas.get('scan_username'),
            self.config_mdb_atlas.get('scan_password'),
            self.config_mdb_atlas.get('scan_db'))
        self._running_event = None
        self._previous_scan = {'hosts_list': [], 'hosts_ports_scan': []}
        self._email_client = EmailClient(
            self.config_email.get('email_from_addr'),
            self.config_email.get('email_password'),
            self.config_email.get('email_smtp_server'),
            self.config_email.get('email_smtp_server_port'))
        self._sms_client = SmsClient(self.config_sms.get('sms_account_sid'),
                                     self.config_sms.get('sms_auth_token'))

    def catch_exceptions(cancel_on_failure=False):
        def catch_exceptions_decorator(job_func):
            @functools.wraps(job_func)
            def wrapper(*args, **kwargs):
                try:
                    return job_func(*args, **kwargs)
                except Exception:
                    import traceback
                    print('Exception caught:')
                    print(traceback.format_exc())
                    if cancel_on_failure:
                        return schedule.CancelJob

            return wrapper

        return catch_exceptions_decorator

    @catch_exceptions(cancel_on_failure=False)
    def _job(self):
        """
        The job represents each step executed every time the scheduler
        is triggered.
        :return:
        """
        print("AutoscanApplication _job Discovering hosts: ")
        scan_result = self._create_scan_result()
        print("AutoscanApplication _job Saving the scan: ")
        self._save_scan_result(scan_result)
        print("AutoscanApplication _job Scan saved: ")
        alerts_result = self._alerter.inspect_generate_alert_output(
            self._previous_scan, scan_result, scan_result['scan_time'])
        print("AutoscanApplication _job Saving the alerts: ")
        # pop() extracts and removes each key in one step
        ssls_result = alerts_result.pop('ssl_analysis', None)
        alerting_check = alerts_result.pop('alerting_check', None)
        self._save_alerts_result(alerts_result)
        print("AutoscanApplication _job Alerts saved: ")
        print("AutoscanApplication _job Saving the ssls: ")
        self._save_ssls_result(ssls_result)
        print("AutoscanApplication _job SSLs saved: ")
        self._previous_scan = scan_result
        self._verify_and_generate_alerts(self._alerting_config, alerting_check,
                                         scan_result['scan_time'])

    def _run_continuously(self, schedule, interval):
        """Continuously run, while executing pending jobs at each elapsed
        time interval.
        """
        cease_continuous_run = threading.Event()

        class ScheduleThread(threading.Thread):
            @classmethod
            def run(cls):
                print('ScheduleThread run:')
                while not cease_continuous_run.is_set():
                    print('ScheduleThread schedule: ' +
                          str(cease_continuous_run.is_set()))
                    print('ScheduleThread schedule run_pending:')
                    schedule.run_pending()
                    time.sleep(interval)

        continuous_thread = ScheduleThread()
        continuous_thread.start()
        return cease_continuous_run

    def _print_scans(self):
        discoveries = self._scan_db.collection(
            self.config_mdb_atlas.get('scans_coll'))
        cursor = discoveries.find()
        for document in cursor:
            print(document)

    def start(self, config):
        """
        Creates the job to be scheduled and returns control to
        the main thread of the Web Application.

        :param config:
        :return:
        """
        print('AutoscanApplication start:')
        if config['network'] is not None:
            self._discoverer.network(config['network'])
        if config['mask'] is not None:
            self._discoverer.mask(config['mask'])
        schedule.every(config['schedule_interval']).minutes.do(
            self._job).tag('scan')
        self._running_event = self._run_continuously(schedule,
                                                     config['interval'])
        p_scan = self._find_last_scan()
        if p_scan.count():
            self._previous_scan = p_scan[0]
        self._alerting_config = config['alerting_config']
        return self._running_event.is_set()

    def stop(self):
        """
        Stops the scheduler from executing the next job. A job that is
        already running cannot be interrupted.
        :return:
        """
        print('AutoscanApplication stop:')
        result = True
        if self._running_event is not None:
            self._running_event.set()
            schedule.clear('scan')
            result = self._running_event.is_set()
        return result

    def _verify_and_generate_alerts(self, alerting_config, alerting_check,
                                    scan_time):
        """
        Compares the current alerting configuration with the alert checks
        produced by the Alerter class and sends the matching notifications.

        :param alerting_config:
        :param alerting_check:
        :param scan_time:
        :return:
        """
        def _verify_channel(channel_ac, alerting_check):
            # A notification is due when any alert type is both enabled in
            # the channel configuration and raised by the current check.
            alert_types = ('missing_hosts', 'new_hosts', 'missing_ports',
                           'new_ports', 'vulns', 'ssl_issues')
            return any(channel_ac.get(alert_type, False)
                       and alerting_check.get(alert_type)
                       for alert_type in alert_types)

        print(
            "AutoscanApplication _verify_and_generate_alerts alerting_config: ",
            alerting_config)
        print(
            "AutoscanApplication _verify_and_generate_alerts alerting_check: ",
            alerting_check)
        print("AutoscanApplication _job Checking for Sending Email: ")
        if _verify_channel(alerting_config.get('email'), alerting_check):
            self._create_and_send_email(scan_time)
            print("AutoscanApplication _job Email Sent: ")
        else:
            print("AutoscanApplication _job Email NOT Sent: ")
        print("AutoscanApplication _job Checking for Sending SMS: ")
        if _verify_channel(alerting_config.get('sms'), alerting_check):
            self._create_and_send_sms(scan_time)
            print("AutoscanApplication _job SMS Sent: ")
        else:
            print("AutoscanApplication _job SMS NOT Sent: ")

    def _create_and_send_sms(self, scan_time):
        """
        Generates a simple SMS body message and sends it to the SMS external subsystem.

        :param scan_time:
        :return:
        """
        body = "Alerting Tool: new alerts for network topology, vulnerabilities " \
               "or SSL certificates, please verify ASAP. Scan Time: " + str(scan_time)
        self._sms_client.send_sms_alert(self.config_sms.get('sms_to_number'),
                                        self.config_sms.get('sms_from_number'),
                                        body)

    def _create_and_send_email(self, scan_time):
        """
        Generates the HTML body and sends it to the SMTP external subsystem.

        :param scan_time:
        :return:
        """
        text_part = "AlertingTool Email Report from scan finished on: " + str(
            scan_time)
        subject = "AlertingTool Email Report " + str(scan_time)
        toaddr_list = [
            e.strip()
            for e in self.config_email.get('email_to_addr').split(',')
        ]
        html_part = (
            "<h1>Network Topology and Vulnerabilities Alert</h1> <br> "
            + self.get_html_alert_result(str(scan_time))
            + "<h1>SSL Analysis Alert</h1> <br> "
            + self.get_html_ssl_result(str(scan_time)))
        self._email_client.send_mail_alert(toaddr_list, text_part, html_part,
                                           subject)

    def get_html_scan_result(self, scan_time):
        scan_doc = self._get_scan_by_date(
            self._parse_string_to_datetime(scan_time))
        return json2html.convert(json=scan_doc,
                                 table_attributes="id=\"scan-table\"")

    def get_html_alert_result(self, scan_time):
        alert_doc = self._get_alert_by_date(
            self._parse_string_to_datetime(scan_time))
        return json2html.convert(json=alert_doc,
                                 table_attributes="id=\"alerts-table\"")

    def get_html_ssl_result(self, scan_time):
        ssl_doc = self._get_ssl_by_date(
            self._parse_string_to_datetime(scan_time))
        return json2html.convert(json=ssl_doc,
                                 table_attributes="id=\"ssls-table\"")

    def get_scans_limit(self, limit):
        return self._get_last_scans_limit(limit)

    def get_alerts_limit(self, limit):
        return self._get_last_alerts_limit(limit)

    def get_ssls_limit(self, limit):
        return self._get_last_ssls_limit(limit)

    def _parse_string_to_datetime(self, string_time):
        return parser.parse(string_time)

    def _get_scan_by_date(self, scantime):
        result = self._scan_db.collection(
            self.config_mdb_atlas.get('scans_coll')).find_one(
                {"scan_time": scantime}, {'_id': 0})
        return result

    def _get_alert_by_date(self, scantime):
        result = self._scan_db.collection(
            self.config_mdb_atlas.get('alerts_coll')).find_one(
                {"scan_time": scantime}, {'_id': 0})
        return result

    def _get_ssl_by_date(self, scantime):
        result = self._scan_db.collection(
            self.config_mdb_atlas.get('ssls_coll')).find_one(
                {"scan_time": scantime}, {'_id': 0})
        return result

    def _save_scan_result(self, scan_result):
        discoveries = self._scan_db.collection(
            self.config_mdb_atlas.get('scans_coll'))
        discoveries.insert_one(scan_result)

    def _save_alerts_result(self, alerts_result):
        alerts = self._scan_db.collection(
            self.config_mdb_atlas.get('alerts_coll'))
        alerts.insert_one(alerts_result)

    def _save_ssls_result(self, ssls_result):
        if ssls_result:
            ssls = self._scan_db.collection(
                self.config_mdb_atlas.get('ssls_coll'))
            ssls.insert_one(ssls_result)

    def _get_last_scans_limit(self, limit=1):
        print("autoscan_app _get_last_scans_limit")
        return self._scan_db.collection(
            self.config_mdb_atlas.get('scans_coll')).find().sort(
                "scan_time", -1).limit(limit)

    def _get_last_alerts_limit(self, limit=1):
        print("autoscan_app _get_last_alert_limit")
        return self._scan_db.collection(
            self.config_mdb_atlas.get('alerts_coll')).find().sort(
                "scan_time", -1).limit(limit)

    def _get_last_ssls_limit(self, limit=1):
        print("autoscan_app _get_last_ssls_limit")
        return self._scan_db.collection(
            self.config_mdb_atlas.get('ssls_coll')).find().sort(
                "scan_time", -1).limit(limit)

    def _find_last_scan(self):
        print('AutoscanApplication _find_last_scan:')
        return self._scan_db.collection(
            self.config_mdb_atlas.get('scans_coll')).find().sort(
                "scan_time", -1).limit(1)

    def get_alert(self, scan_time=None):
        # Only the most recent alert is supported; lookup by scan_time is
        # not implemented here.
        if not scan_time:
            return self._find_last_alert()

    def _find_last_alert(self):
        print('AutoscanApplication _find_last_alert')
        alerts = self._scan_db.collection(
            self.config_mdb_atlas.get('alerts_coll')).find({}, {
                '_id': False,
                'scan_time': False
            }).sort("scan_time", -1).limit(1)
        result = {"message": "no alerts"}
        if alerts.count():
            result = alerts[0]
            print(result)
        return dumps(result)

    def _create_scan_result(self):
        print('AutoscanApplication _create_scan_result:')
        ip_discovery = self._discoverer.discover()
        nmapper_result = []
        for ip in ip_discovery:
            scan_result = self._nmapper.scan_ports_per_host(ip)
            nmapper_result.append(scan_result)

        result = {
            'scan_time': datetime.datetime.utcnow(),
            'hosts_list': ip_discovery,
            'hosts_ports_scan': nmapper_result
        }

        return result
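A minimal wiring sketch for this class; every key below is read somewhere in the code above, but all values are hypothetical placeholders:

app_config = {
    'mdb_atlas': {'scan_cluster_name': '...', 'scan_username': '...',
                  'scan_password': '...', 'scan_db': '...',
                  'scans_coll': 'scans', 'alerts_coll': 'alerts',
                  'ssls_coll': 'ssls'},
    'email': {'email_from_addr': '...', 'email_password': '...',
              'email_smtp_server': '...', 'email_smtp_server_port': 587,
              'email_to_addr': 'ops@example.com'},
    'sms': {'sms_account_sid': '...', 'sms_auth_token': '...',
            'sms_from_number': '...', 'sms_to_number': '...'},
}
app = AutoscanApplication(config=app_config)
app.start({
    'network': '192.168.1.0',   # scanned network and mask
    'mask': '24',
    'schedule_interval': 30,    # run the job every 30 minutes
    'interval': 1,              # scheduler polling interval in seconds
    'alerting_config': {'email': {'new_hosts': True}, 'sms': {}},
})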
Example #7
class Thumbnailer(object):
    
    def __init__(self, filepath, output_dir, fileinfo=None, interval=None, number=5,\
                 width=120, height=90, preserve_aspect_ratio=True, prefix="thumbnail", format='jpeg'):
        self.filepath = filepath
        self.output_dir = output_dir
        self.width = width
        #The default scaling is set to 4:3, with 120*90
        self.height = height
        self.count = number
        self.interval = interval
        #TODO: Need to use proper pixel-aspect-ratio
        self.par = preserve_aspect_ratio
        self.prefix = prefix
        self.format = format
        self.fileinfo = fileinfo

    def on_new_preroll_cb(self, appsink):
        buffer = appsink.emit('pull-preroll')
        if buffer:
            self._load_and_save_file(buffer, buffer.timestamp/gst.SECOND)

    def create_thumbnails(self):
        _log.debug("Getting Thumbnails for %s" % self.filepath)

        if not os.path.exists(self.filepath):
            _log.debug("File not found: %s" % self.filepath)
            return False

        if self.fileinfo is None:
            self.fileinfo = Discoverer(self.filepath)
            self.fileinfo.do_discovery()

        if (self.fileinfo is None) or (self.fileinfo.videolength <= 0) or not self.fileinfo.is_video:
            _log.debug("Skipping thumbnail creation. No video stream found for file: %s." % self.filepath)
            return False
 
        offset = counter = 0 
        caps = "video/x-raw-rgb,format=RGB,width=%s,height=%s,pixel-aspect-ratio=1/1" % (self.width, self.height)
        cmd = "uridecodebin uri=file://%s  ! ffmpegcolorspace ! videorate ! videoscale ! " \
                "ffmpegcolorspace ! appsink name=sink caps=%s" % \
                (os.path.abspath(self.filepath), caps)

        pipeline = gst.parse_launch(cmd)
        appsink = pipeline.get_by_name("sink")
        appsink.set_property('emit-signals', True)
        # Set sync off to make decoding faster
        appsink.set_property('sync', False)
        appsink.connect('new-preroll', self.on_new_preroll_cb)
        pipeline.set_state(gst.STATE_PAUSED)
        pipeline.get_state()
    
        if self.interval is None:
            self.interval = ((self.fileinfo.videolength/gst.SECOND) / self.count) or 1
    
        while ((offset < self.fileinfo.videolength/gst.SECOND) and (counter < self.count)):
            ret = pipeline.seek_simple( 
                gst.FORMAT_TIME, gst.SEEK_FLAG_ACCURATE | gst.SEEK_FLAG_FLUSH, offset * gst.SECOND)
            pipeline.get_state()
            offset += self.interval
            counter += 1
        return True

    # Load pixbuf and save file to disk
    def _load_and_save_file(self, buffer, offset):
        file_name = "%s/%s_%s.%s" %  (self.output_dir, self.prefix, offset, self.format)
        try:
            pix_buf = gtk.gdk.pixbuf_new_from_data(buffer.data, \
                        gtk.gdk.COLORSPACE_RGB, False, 8, self.width, self.height, self.width * 3)
            pix_buf.save(file_name, self.format)  # honor the configured output format
        except Exception as e:
            _log.debug("Error saving %s to disk: %s " % (file_name, e))
Example #8
if not os.path.exists(output_home):
    logger.info('Output path does not exist; creating it.')
    os.makedirs(output_home)

threshold_parameter = namedtuple(
    'threshold_parameter', ['tf', 'agg_coef', 'max_entropy', 'min_entropy'])
threshold_parameters = dict()

threshold_parameters['bigram'] = threshold_parameter(*args.bigram)
threshold_parameters['latin'] = threshold_parameter(*args.latin)
threshold_parameters[2] = threshold_parameter(*args.unigram_2)
threshold_parameters[3] = threshold_parameter(*args.unigram_3)

dictionary = load_dictionary(args.dictionary_path)

discoverer = Discoverer(save_segmentation=False)

# Used to store stats generated in each iteration.
stats_ind = list()

import time

for iteration in range(args.iteration):
    time.sleep(1)
    logger.info("""
   **********************************************************************
    
    commencing iteration {}...
    
   **********************************************************************
    """.format(iteration + 1))