Example #1
    def to_html_template(self):
        """ Dictionary used to render FORM

        :return     Dictionary of configuration parameters
        """
        """ Retrieve optional proxy configuration """
        tmp = get_proxy(True)
        if tmp:
            proxy_configuration = "http-proxy-retry" + '\n'
            proxy_configuration += "http-proxy {} {}".format(tmp[0],
                                                             tmp[1]) + '\n'
        else:
            proxy_configuration = ""
        """ And returns the attributes of the class """
        return {
            'id': str(self.id),
            'node': self.node.name,
            'remote_server': self.remote_server,
            'remote_port': self.remote_port,
            'proto': self.proto,
            'status': self.status,
            'tunnels_status': self.tunnels_status,
            'proxy_configuration': proxy_configuration,
            'ca': self.tls_profile.x509_certificate.get_base_filename() + ".chain",
            'cert': self.tls_profile.x509_certificate.get_base_filename() + ".crt",
            'key': self.tls_profile.x509_certificate.get_base_filename() + ".key"
        }
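
All of these snippets revolve around a get_proxy() helper whose return shape depends on how it is called: Example #1 unpacks a (host, port) pair from get_proxy(True), while the other examples pass the result straight to requests as a proxies mapping. A minimal sketch of such a helper, reading the proxy from the environment purely for illustration (the real project presumably stores it in its cluster configuration, and the parameter name here is assumed):

import os

def get_proxy(openvpn_format=False):
    """ Hypothetical sketch of the helper assumed by these examples """
    proxy_url = os.environ.get("http_proxy", "")
    if not proxy_url:
        # Falsy in both shapes: Example #1 tests `if tmp:`, and an empty
        # dict is accepted by requests and by the .get() calls in Example #10
        return () if openvpn_format else {}
    if openvpn_format:
        # (host, port) pair, consumed as tmp[0]/tmp[1] in Example #1
        host, _, port = proxy_url.split("://")[-1].partition(":")
        return host, port or "3128"
    # requests-style mapping, as passed to proxies= elsewhere on this page
    return {"http": proxy_url, "https": proxy_url, "ftp": proxy_url}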
Example #2
    def __execute_query(self, uri, data=None):
        # None rather than a mutable {} default; requests accepts data=None
        uri = "{}{}{}".format(self.predator_host, self.predator_version, uri)

        try:
            logger.info('[PREDATOR] Calling {}'.format(uri))

            r = requests.get(uri,
                             data=data,
                             proxies=get_proxy(),
                             headers={'Authorization': self.predator_api_key})

            if r.status_code != 200:
                if settings.DEV_MODE:
                    return False, r.text

                return False, _("An error has occurred")

            return True, r.json()

        except json.decoder.JSONDecodeError:
            logger.error('JSON decoding error while calling {}'.format(uri))
            return False, _('An error has occurred')

        except ConnectionError as e:
            logger.critical(e, exc_info=1)
            return False, _("Unable to contact API")

        except Exception as e:
            if settings.DEV_MODE:
                raise

            logger.critical(e, exc_info=1)
            return False, _('An error has occurred')
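
One caveat with the call above: requests.get(..., data=...) sends the mapping as a request body, not as a query string. If the API expects URL parameters, params= would be the usual choice. A quick illustration:

import requests

# params= builds the query string; data= would go into the request body
r = requests.get("https://httpbin.org/get", params={"q": "vulture"})
print(r.json()["args"])  # {'q': 'vulture'}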
Example #3
def rss_fetch():
    if not Cluster.get_current_node().is_master_mongo:
        logger.debug(
            "Crontab::rss_fetch: Not the master node, skipping RSS fetch")
        return

    proxy = get_proxy()
    try:
        rss_uri = "https://predator.vultureproject.org/news.json"
        infos = requests.get(rss_uri, proxies=proxy).json()
        logger.debug("Crontab::rss_fetch: Received {} RSS feed".format(
            len(infos)))
        for info in infos:
            try:
                RSS.objects.get(title=info['title'])
            except RSS.DoesNotExist:
                RSS.objects.create(title=info['title'],
                                   date=timezone.make_aware(
                                       datetime.datetime.strptime(
                                           info['timestamp'],
                                           "%d/%m/%Y %H:%M:%S")),
                                   level=info['level'],
                                   content=info["content"])

    except Exception as e:
        logger.error("Crontab::rss_fetch: {}".format(e), exc_info=1)
        raise
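
The feed timestamps are day-first, which is easy to misread; a quick check of the parsing step (sample value assumed):

import datetime
from django.utils import timezone

# "%d/%m/%Y %H:%M:%S" is day-first: this parses as the 3rd of April
naive = datetime.datetime.strptime("03/04/2021 18:30:00", "%d/%m/%Y %H:%M:%S")
aware = timezone.make_aware(naive)  # needs a configured Django settings module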
Example #4
 def get_oauth2_session(self, redirect_uri):
     session = OAuth2Session(self.client_id, redirect_uri=redirect_uri, scope=self.scopes)
     if self.use_proxy:
         session.proxies=get_proxy()
     if not self.verify_certificate:
         session.verify = False
     return session
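
For context, the session returned above is a requests_oauthlib OAuth2Session, so a caller would typically drive the authorization-code flow with it. A sketch with placeholder endpoint URLs and a hypothetical `provider` object and `callback_url`:

# Hypothetical caller; endpoints, `provider` and `callback_url` are illustrative
session = provider.get_oauth2_session("https://sp.example.com/callback")
# Redirect the user to the provider's authorization endpoint
auth_url, state = session.authorization_url("https://idp.example.com/authorize")
# After the callback, exchange the authorization code for a token
token = session.fetch_token("https://idp.example.com/token",
                            client_secret=provider.client_secret,
                            authorization_response=callback_url)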
Example #5
 def retrieve_config(self, test=False, force=True):
     # TODO : Handle CA_BUNDLE
     # If loaded data is too old, reload it again
     refresh_time = timezone.now() - timedelta(hours=CONFIG_RELOAD_INTERVAL)
     if (self.last_config_time is None or self.last_config_time < refresh_time)\
             or test:
         logger.info(get_proxy() if self.use_proxy else None)
         r = requests.get("{}/.well-known/openid-configuration".format(self.provider_url),
                          proxies=get_proxy() if self.use_proxy else None,
                          verify=self.verify_certificate, timeout=10)
         r.raise_for_status()
         config = r.json()
         logger.info(config)
         self.issuer = config['issuer']
         self.authorization_endpoint = config['authorization_endpoint']
         self.token_endpoint = config['token_endpoint']
         self.userinfo_endpoint = config['userinfo_endpoint']
         self.end_session_endpoint = config.get('end_session_endpoint') or config['revocation_endpoint']
         self.last_config_time = timezone.now()
         if not test:
             self.save()
         else:
             return config
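
retrieve_config only consumes a handful of keys from the provider's discovery document; a trimmed example of what /.well-known/openid-configuration typically returns (values illustrative):

config = {
    "issuer": "https://idp.example.com",
    "authorization_endpoint": "https://idp.example.com/authorize",
    "token_endpoint": "https://idp.example.com/token",
    "userinfo_endpoint": "https://idp.example.com/userinfo",
    "end_session_endpoint": "https://idp.example.com/logout",
    "revocation_endpoint": "https://idp.example.com/revoke",
}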
Example #6
    def submit_ip(self):
        uri = "{}{}{}".format(self.predator_host, self.predator_version,
                              "/reputation/vulture/{}/".format(self.info))

        r = requests.put(uri,
                         proxies=get_proxy(),
                         headers={'Authorization': self.predator_api_key})

        if r.status_code != 200:
            if settings.DEV_MODE:
                return False, r.text

            return False, _('An error has occurred')

        return True, r.json()
Example #7
def doc_update(filename=None):

    proxy = get_proxy()

    if not filename:
        filename = get_version()

    try:
        sha_uri = f"https://download.vultureproject.org/v4/doc/{filename}.sha"
        r = requests.get(sha_uri, proxies=proxy)

        if r.status_code == 404 and filename != "master":
            doc_update("master")
            return

        sha = r.content.decode().strip()
        try:
            current_sha = get_current_sha()
            if current_sha == sha:
                logger.debug("[DOCUMENTATION] Identical SHA. Passing")
                return
        except FileNotFoundError:
            pass

        logger.info("[DOCUMENTATION] New version is available. Download it")

        # Write sha
        with open(f"{DOC_PATH}/doc.sha", 'w') as f:
            f.write(sha)

        # Download documentation
        doc_uri = f"https://download.vultureproject.org/v4/doc/{filename}.tar.gz"
        doc = requests.get(doc_uri, proxies=proxy)
        with open(f"{DOC_PATH}/{filename}.tar.gz", 'wb') as f:
            f.write(doc.content)

        with tarfile.open(f"{DOC_PATH}/{filename}.tar.gz") as tf:
            tf.extractall(f"{DOC_PATH}/")

    except Exception as e:
        logger.error("Crontab::doc_update: {}".format(e), exc_info=1)
        raise
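
get_current_sha is not shown on this page; given that doc_update writes the checksum to {DOC_PATH}/doc.sha, a plausible counterpart would be (assumed implementation, reusing the same DOC_PATH constant):

def get_current_sha():
    """ Read back the last stored checksum; raises FileNotFoundError
        on first run, which doc_update deliberately ignores """
    with open(f"{DOC_PATH}/doc.sha") as f:
        return f.read().strip()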
Example #8
 def download_file(self):
     """ """
     """ If we haven't already downloaded url """
     if self.content:
         return self.content
     """ Retrieve url and content """
     auth = None
     if self.auth_type:
         auth_type = AUTH_TYPE_CLASSES.get(self.auth_type)
         if auth_type:
             auth = auth_type(self.user, self.password)
     logger.debug("Try to get URL {}".format(self.url))
     try:
         response = requests.request(
             self.method,
             self.url,
             data=self.post_data if self.method == "POST" else None,
             headers=self.custom_headers,
             auth=auth,
             allow_redirects=True,
             proxies=get_proxy(),
             timeout=(2.0, 2.0))
         # logger.info("URL '{}' retrieved, status code = {}".format(self.url, response.status_code))
         assert response.status_code == 200, "Response code is not 200 ({})".format(
             response.status_code)
         """ If its a .gz file, dezip-it """
         if self.url[-3:] == ".gz":
             self.filename = self.url.split('/')[-1][:-3]
             return gzip_decompress(response.content)
         if response.headers.get("Content-Disposition"):
             match = REGEX_GZ.search(
                 response.headers.get("Content-Disposition"))
             if match and match[1][-3:] == ".gz":
                 self.filename = match[1][:-3]
                 return gzip_decompress(response.content)
         if not self.filename:
             self.filename = self.url.split('/')[-1]
     except Exception as e:
         raise VultureSystemError(str(e), "download '{}'".format(self.url))
     return response.content
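
REGEX_GZ and gzip_decompress are defined elsewhere in the module; judging from how the match is used (match[1] must be a filename that can end in .gz), plausible definitions would be (assumed, not taken from the source):

import re
from gzip import decompress as gzip_decompress

# Assumed: capture the filename token from a Content-Disposition header,
# e.g. 'attachment; filename="rules.csv.gz"' -> match[1] == 'rules.csv.gz'
REGEX_GZ = re.compile(r'filename="?([^";]+)"?')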
Example #9
def doc_update():

    proxy = get_proxy()
    try:
        doc_uri = "https://github.com/VultureProject/vulture-doc/archive/master.zip"
        doc = requests.get(doc_uri, proxies=proxy)
        logger.debug("Crontab::doc_update: Downloading DOC from Github")

        doc_path = "/zroot/apache" + settings.DOCUMENTATION_PATH + "/"

        with open(doc_path + "master.zip", "wb") as f:
            f.write(doc.content)

        with zipfile.ZipFile(doc_path + "master.zip", 'r') as zip_ref:
            zip_ref.extractall(doc_path)

    except Exception as e:
        logger.error("Crontab::doc_update: {}".format(e), exc_info=1)
        raise
Example #10
def security_update(node_logger=None, tenant_id=None):
    """
    Update Vulture's security databases.

    :return: True on success, False if the ipset index could not be fetched
    """
    # Get proxy first
    proxies = get_proxy()
    """ Every node needs to be up2date """
    try:
        logger.info("Crontab::security_update: calling pkg update...")
        res = subprocess.check_output(
            ["/usr/local/bin/sudo", "/usr/sbin/pkg",
             "-ohttp_proxy={}".format(proxies.get('http', "")),
             "-ohttps_proxy={}".format(proxies.get('https', "")),
             "-oftp_proxy={}".format(proxies.get('ftp', "")),
             "update"],
            stderr=subprocess.PIPE).decode("utf-8")
        if "All repositories are up to date" not in res:
            logger.error("Crontab::security_update: Unable to update pkg")
        else:
            logger.info(
                "Crontab::security_update: All repositories are up to date")
    except subprocess.CalledProcessError as e:
        logger.error("Failed to update pkg packages : {}".format(
            str(e.stderr.decode('utf8'))))
    except Exception as e:
        logger.error("Failed to update pkg packages : {}".format(str(e)))
    """ Do we have something urgent to update ? """
    try:
        logger.info("Crontab::security_update: calling pkg upgrade...")
        res = subprocess.check_output(
            ["/usr/local/bin/sudo", "/usr/sbin/pkg",
             "-ohttp_proxy={}".format(proxies.get('http', "")),
             "-ohttps_proxy={}".format(proxies.get('https', "")),
             "-oftp_proxy={}".format(proxies.get('ftp', "")),
             "audit", "-F"],
            stderr=subprocess.PIPE).decode('utf8')
        if "0 problem" in res:
            logger.info("Crontab::security_update: No vulnerability found.")
        elif "is vulnerable" in res:
            logger.info(
                "Crontab::security_update: Security problem found : {}".format(
                    res))
            security_alert(
                "Security problem found on node {}".format(get_hostname()),
                "danger", res)
    except subprocess.CalledProcessError as e:
        if e.stdout.decode("utf-8").startswith("0 problem"):
            logger.info("Crontab::security_update: No vulnerability found.")
        elif "is vulnerable" in e.stdout.decode("utf-8"):
            logger.info(
                "Crontab::security_update: Security problem found : {}".format(
                    e.stdout.decode('utf-8')))
            security_alert(
                "Security problem found on node {}".format(get_hostname()),
                "danger", e.stdout.decode("utf-8"))
        else:
            logger.error(
                "Crontab::security_update: Failed to retrieve vulnerabilities : "
                "{}".format(str(e)))
    except Exception as e:
        logger.error(
            "Crontab::security_update: Failed to retrieve vulnerabilities : {}"
            .format(e))

    # If tenant id given, try to retrieve the tenant
    if tenant_id:
        try:
            tenant = Tenants.objects.get(pk=tenant_id)
        except Exception:
            logger.error(
                "Security_update: Failed to retrieve tenant with id {}"
                .format(tenant_id))
            raise Exception("Tenant not found")

    # If it is the master node, retrieve the databases
    if Cluster.get_current_node().is_master_mongo:
        # If tenant id given, retrieve the predator api key
        if tenant_id:
            predator_tokens = [tenant.predator_apikey]
        else:
            predator_tokens = Tenants.objects.mongo_distinct("predator_apikey")
        # Loop over predator api keys configured over Multi-Tenants configs
        for predator_token in predator_tokens:
            """ Download newest reputation databases list """
            try:
                logger.info(
                    "Crontab::security_update: get Vulture's ipsets...")
                infos = requests.get(IPSET_VULTURE + "index.json",
                                     headers={'Authorization': predator_token},
                                     proxies=proxies,
                                     timeout=5).json()
            except Exception as e:
                logger.error(
                    "Crontab::security_update: Unable to download Vulture's ipsets: {}"
                    .format(e))
                return False

            infos.append({
                'filename': "firehol_level1.netset",
                'label': "Firehol Level 1 netset",
                'description': "Firehol IPSET Level 1",
                'type': "ipv4_netset",
                'url': IPSET_VULTURE + "firehol_level1.netset"
            })
            infos.append({
                'filename': "vulture-v4.netset",
                'label': "Vulture Cloud IPv4",
                'description': "Vulture Cloud IPv4",
                'type': "ipv4_netset",
                'url': IPSET_VULTURE + "firehol_level1.netset"
            })
            infos.append({
                'filename': "vulture-v6.netset",
                'label': "Vulture Cloud IPv6",
                'description': "Vulture Cloud IPv6",
                'type': "ipv6_netset",
                'url': IPSET_VULTURE + "vulture-v6.netset"
            })

            for info in infos:
                label = info['label']
                description = info['description']
                entry_type = info['type']
                url = info.get('url', IPSET_VULTURE + info['filename'])
                nb_netset = info.get('nb_netset', 0)
                nb_unique = info.get('nb_unique', 0)
                # Add predator api key in filename
                encoded_token = b64encode(
                    predator_token.encode('utf8')).decode('utf8')
                filename = ".".join(info['filename'].split('.')[:-1]) + "_" + encoded_token + "." + \
                           info['filename'].split('.')[-1]
                """ Create/update object """
                try:
                    reputation_ctx = ReputationContext.objects.get(
                        filename=filename)
                except ReputationContext.DoesNotExist:
                    reputation_ctx = ReputationContext(filename=filename)
                reputation_ctx.name = label
                reputation_ctx.url = url
                reputation_ctx.db_type = entry_type
                reputation_ctx.label = label
                reputation_ctx.description = description
                reputation_ctx.nb_netset = nb_netset
                reputation_ctx.nb_unique = nb_unique
                reputation_ctx.internal = True
                # Use predator_apikey only for predator requests
                if "predator.vultureproject.org" in reputation_ctx.url:
                    reputation_ctx.custom_headers = {
                        'Authorization': predator_token
                    }
                else:
                    reputation_ctx.custom_headers = {}
                reputation_ctx.save()
                logger.info("Reputation context {} created.".format(label))

    # On ALL nodes, write databases on disk
    # All internal reputation contexts are retrieved and created if needed
    # We can now download and write all reputation contexts
    # If tenant id given, only write on disk related reputation databases
    if tenant_id:
        encoded_token = b64encode(
            tenant.predator_apikey.encode('utf8')).decode('utf8')
        reputation_ctxs = ReputationContext.mongo_find({
            "enable_hour_download": "true",
            "filename": {
                "$regex": ".*_{}.[a-z]+$".format(encoded_token)
            }
        })
    else:
        reputation_ctxs = ReputationContext.objects.filter(
            enable_hour_download=True)
    for reputation_ctx in reputation_ctxs:
        try:
            content = reputation_ctx.download()
        except VultureSystemError as e:
            if "404" in str(e) or "403" in str(e) and reputation_ctx.internal:
                logger.info(
                    "Security_update::info: Reputation context '{}' is now unavailable ({}). "
                    "Deleting it.".format(str(e), reputation_ctx))
                reputation_ctx.delete()
            else:
                logger.error(
                    "Security_update::error: Failed to download reputation database '{}' : {}"
                    .format(reputation_ctx.name, e))
            continue
        except Exception as e:
            logger.error(
                "Security_update::error: Failed to download reputation database '{}' : {}"
                .format(reputation_ctx.name, e))
            continue
        try:
            tmp_filename = "{}{}".format("/tmp/", get_random_string())
            with open(tmp_filename, "wb") as f:
                f.write(content)
            """ Immediatly reload the rsyslog service to prevent crash on MMDB access """
            # Filename is a variable of us (not injectable)
            # Pass the command as a single string, since shell=True is required here
            command = ('/usr/local/bin/sudo /bin/mv {} {} '
                       '&& /usr/local/bin/sudo /usr/sbin/jexec '
                       'rsyslog /usr/sbin/service rsyslogd reload').format(
                           tmp_filename, reputation_ctx.absolute_filename)
            reload_rsyslog = subprocess.run(command,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            shell=True)
            if reload_rsyslog.returncode == 1:
                if "rsyslogd not running" in reload_rsyslog.stderr.decode(
                        'utf8'):
                    logger.info(
                        "Crontab::security_update: Database written and rsyslogd not runing."
                    )
                else:
                    logger.error(
                        "Crontab::security_update: It seems that the database cannot be written : {}"
                        .format(reload_rsyslog.stderr.decode('utf8')))
            elif reload_rsyslog.returncode == 0:
                logger.info(
                    "Crontab::security_update: Database written and rsyslogd reloaded."
                )
            else:
                logger.error(
                    "Crontab::security_update: Database write failure : "
                    "stdout={}, stderr={}".format(
                        reload_rsyslog.stdout.decode('utf8'),
                        reload_rsyslog.stderr.decode('utf8')))
            logger.info(
                "Crontab::security_update: Reputation database named '{}' (file '{}') successfully written."
                .format(reputation_ctx.name, reputation_ctx.absolute_filename))
        except Exception as e:
            logger.error(
                "Security_update::error: Failed to write reputation database '{}' : {}"
                .format(reputation_ctx.name, e))

    logger.info("Security_update done.")

    return True
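
The per-tenant filename scheme above inserts the base64-encoded API key just before the extension, which is exactly what the $regex in the tenant branch later matches. A worked example (token value illustrative):

from base64 import b64encode

token = "my-predator-key"  # illustrative value
encoded = b64encode(token.encode('utf8')).decode('utf8')  # 'bXktcHJlZGF0b3Ita2V5'
parts = "firehol_level1.netset".split('.')
filename = ".".join(parts[:-1]) + "_" + encoded + "." + parts[-1]
# -> 'firehol_level1_bXktcHJlZGF0b3Ita2V5.netset'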
Example #11
def security_update(node_logger=None):
    """
    Update Vulture's security databases.

    :return: True on success, False if the ipset index could not be fetched
    """
    # Get proxy first
    proxies = get_proxy()
    """ Every node needs to be up2date """
    try:
        logger.info("Crontab::security_update: calling pkg update...")
        res = subprocess.check_output(
            ["/usr/local/bin/sudo", "/usr/sbin/pkg",
             "-ohttp_proxy={}".format(proxies.get('http', "")),
             "-ohttps_proxy={}".format(proxies.get('https', "")),
             "-oftp_proxy={}".format(proxies.get('ftp', "")),
             "update"],
            stderr=subprocess.PIPE).decode("utf-8")
        if "All repositories are up to date" not in res:
            logger.error("Crontab::security_update: Unable to update pkg")
        else:
            logger.info(
                "Crontab::security_update: All repositories are up to date")
    except subprocess.CalledProcessError as e:
        logger.error("Failed to update pkg packages : {}".format(
            str(e.stderr.decode('utf8'))))
    except Exception as e:
        logger.error("Failed to update pkg packages : {}".format(str(e)))
    """ Do we have something urgent to update ? """
    try:
        logger.info("Crontab::security_update: calling pkg upgrade...")
        res = subprocess.check_output(
            ["/usr/local/bin/sudo", "/usr/sbin/pkg",
             "-ohttp_proxy={}".format(proxies.get('http', "")),
             "-ohttps_proxy={}".format(proxies.get('https', "")),
             "-oftp_proxy={}".format(proxies.get('ftp', "")),
             "audit", "-F"],
            stderr=subprocess.PIPE).decode('utf8')
        if "0 problem" in res:
            logger.info("Crontab::security_update: No vulnerability found.")
        elif "is vulnerable" in res:
            logger.info(
                "Crontab::security_update: Security problem found : {}".format(
                    res))
            security_alert(
                "Security problem found on node {}".format(get_hostname()),
                "danger", res)
    except subprocess.CalledProcessError as e:
        if e.stdout.decode("utf-8").startswith("0 problem"):
            logger.info("Crontab::security_update: No vulnerability found.")
        elif "is vulnerable" in e.stdout.decode("utf-8"):
            logger.info(
                "Crontab::security_update: Security problem found : {}".format(
                    e.stdout.decode('utf-8')))
            security_alert(
                "Security problem found on node {}".format(get_hostname()),
                "danger", e.stdout.decode("utf-8"))
        else:
            logger.error(
                "Crontab::security_update: Failed to retrieve vulnerabilities : "
                "{}".format(str(e)))
    except Exception as e:
        logger.error(
            "Crontab::security_update: Failed to retrieve vulnerabilities : {}"
            .format(e))

    # If it is the master node, retrieve the databases
    if Cluster.get_current_node().is_master_mongo:
        # Retrieve predator_token
        predator_token = Cluster.get_global_config().predator_apikey
        """ If we are the master node, download newest reputation databases """
        try:
            logger.info("Crontab::security_update: get Vulture's ipsets...")
            infos = requests.get(IPSET_VULTURE + "index.json",
                                 headers={'Authorization': predator_token},
                                 proxies=proxies,
                                 timeout=5).json()
        except Exception as e:
            logger.error(
                "Crontab::security_update: Unable to download Vulture's ipsets: {}"
                .format(e))
            return False

        infos.append({
            'filename': "GeoLite2-Country.mmdb",
            'label': "Geolite2 Country",
            'description': "Maxmind DB's Geoip country database",
            'type': "GeoIP",
            'url': "https://updates.maxmind.com/geoip/databases/GeoLite2-Country/update"
        })
        infos.append({
            'filename': "GeoLite2-City.mmdb",
            'label': "Geolite2 City",
            'description': "Maxmind DB's Geoip city database",
            'type': "GeoIP",
            'url': "https://updates.maxmind.com/geoip/databases/GeoLite2-City/update"
        })
        infos.append({
            'filename': "firehol_level1.netset",
            'label': "Firehol Level 1 netset",
            'description': "Firehol IPSET Level 1",
            'type': "ipv4_netset",
            'url': IPSET_VULTURE + "firehol_level1.netset"
        })
        infos.append({
            'filename': "vulture-v4.netset",
            'label': "Vulture Cloud IPv4",
            'description': "Vulture Cloud IPv4",
            'type': "ipv4_netset",
            'url': IPSET_VULTURE + "firehol_level1.netset"
        })
        infos.append({
            'filename': "vulture-v6.netset",
            'label': "Vulture Cloud IPv6",
            'description': "Vulture Cloud IPv6",
            'type': "ipv6_netset",
            'url': IPSET_VULTURE + "vulture-v6.netset"
        })

        for info in infos:
            filename = info['filename']
            label = info['label']
            description = info['description']
            entry_type = info['type']
            url = info.get('url', IPSET_VULTURE + filename)
            nb_netset = info.get('nb_netset', 0)
            nb_unique = info.get('nb_unique', 0)
            """ Create/update object """
            try:
                reputation_ctx = ReputationContext.objects.get(
                    filename=filename)
            except ReputationContext.DoesNotExist:
                reputation_ctx = ReputationContext(filename=filename)
            reputation_ctx.name = label
            reputation_ctx.url = url
            reputation_ctx.db_type = entry_type
            reputation_ctx.label = label
            reputation_ctx.description = description
            reputation_ctx.nb_netset = nb_netset
            reputation_ctx.nb_unique = nb_unique
            reputation_ctx.internal = True
            # Use predator_apikey only for predator requests
            if "predator.vultureproject.org" in reputation_ctx.url:
                reputation_ctx.custom_headers = {
                    'Authorization': predator_token
                }
            else:
                reputation_ctx.custom_headers = {}
            reputation_ctx.save()
            logger.info("Reputation context {} created.".format(label))

    # On ALL nodes, write databases on disk
    # All internal reputation contexts are retrieved and created if needed
    # We can now download and write all reputation contexts
    for reputation_ctx in ReputationContext.objects.all():
        try:
            content = reputation_ctx.download()
        except Exception as e:
            logger.error(
                "Security_update::error: Failed to download reputation database '{}' : {}"
                .format(reputation_ctx.name, e))
            continue
        try:
            tmp_filename = "{}{}".format("/tmp/", get_random_string())
            with open(tmp_filename, "wb") as f:
                f.write(content)
            """ Immediatly reload the rsyslog service to prevent crash on MMDB access """
            # Filename is a variable of us (not injectable)
            # Pass the command as a single string, since shell=True is required here
            command = ('/usr/local/bin/sudo /bin/mv {} {} '
                       '&& /usr/local/bin/sudo /usr/sbin/jexec '
                       'rsyslog /usr/sbin/service rsyslogd reload').format(
                           tmp_filename, reputation_ctx.absolute_filename)
            reload_rsyslog = subprocess.run(command,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            shell=True)
            if reload_rsyslog.returncode == 1:
                if "rsyslogd not running" in reload_rsyslog.stderr.decode(
                        'utf8'):
                    logger.info(
                        "Crontab::security_update: Database written and rsyslogd not runing."
                    )
                else:
                    logger.error(
                        "Crontab::security_update: It seems that the database cannot be written : {}"
                        .format(reload_rsyslog.stderr.decode('utf8')))
            elif reload_rsyslog.returncode == 0:
                logger.info(
                    "Crontab::security_update: Database written and rsyslogd reloaded."
                )
            else:
                logger.error(
                    "Crontab::security_update: Database write failure : "
                    "stdout={}, stderr={}".format(
                        reload_rsyslog.stdout.decode('utf8'),
                        reload_rsyslog.stderr.decode('utf8')))
            logger.info(
                "Crontab::security_update: Reputation database named '{}' (file '{}') successfully written."
                .format(reputation_ctx.name, reputation_ctx.absolute_filename))
        except Exception as e:
            logger.error(
                "Security_update::error: Failed to write reputation database '{}' : {}"
                .format(reputation_ctx.name, e))

    logger.info("Security_update done.")

    return True
Example #12
def fetch_yara_rules(logger):
    logger.info("getting updated yara rules...")

    proxy = get_proxy()
    try:
        doc_uri = "https://github.com/Yara-Rules/rules/archive/master.zip"
        doc = requests.get(doc_uri, proxies=proxy)
    except Exception as e:
        logger.error("Yara::fetch_yara_rules:: {}".format(e), exc_info=1)
        raise

    logger.info("Yara::fetch_yara_rules:: extracting them...")
    try:
        with open("/var/tmp/yara_rules.zip", "wb") as f:
            f.write(doc.content)
        with zipfile.ZipFile("/var/tmp/yara_rules.zip") as z:
            z.extractall("/var/tmp/yara_rules/")
    except Exception as e:
        logger.error("Yara::fetch_yara_rules:: {}".format(e), exc_info=1)
        raise

    new_rules = []
    all_rules = []

    fileset = set()

    rule_regex = re.compile(r'^\s*rule .*$')

    for (baseRoot, baseDirs, baseFiles) in os.walk("/var/tmp/yara_rules/rules-master/"):
        for dir in baseDirs:
            for (root, dirs, files) in os.walk(os.path.join(baseRoot, dir)):
                for filename in files:
                    contains_rules = False
                    fullpath = os.path.join(root, filename)
                    name, extension = os.path.splitext(filename)
                    with open(fullpath, 'r', encoding='utf-8') as f:
                        for line in f:
                            if rule_regex.search(line):
                                contains_rules = True
                                break  # one matching rule is enough
                    if contains_rules:
                        try:
                            subprocess.check_output(["/usr/local/bin/yara", fullpath, fullpath])
                            with open(fullpath, 'r', encoding='utf-8') as content_file:
                                filtered_lines = [line for line in content_file if "import " not in line]
                                rule, created = InspectionRule.objects.get_or_create(
                                    name=name,
                                    techno="yara",
                                    defaults={
                                        "category": dir,
                                        "content": ''.join(filtered_lines),
                                        "source": "github"
                                    }
                                )
                                if created:
                                    new_rules.append(rule)
                                all_rules.append(rule)
                                rule.save()
                        except subprocess.CalledProcessError:
                            pass
                        except Exception as e:
                            logger.error(e)

    logger.info("Yara::fetch_yara_rules:: finished importing new rules")

    if not new_rules and InspectionPolicy.objects.filter(name__exact='github_policy').exists():
        return

    newInspectionPolicy, created = InspectionPolicy.objects.get_or_create(
        name="github_policy",
        defaults={
            "techno": "yara",
            "description": "automatic ruleset created from Yara rules on https://github.com/Yara-Rules/rules"
        }
    )
    if not created:
        for rule in new_rules:
            newInspectionPolicy.rules.add(rule)
    else:
        for rule in all_rules:
            if rule.category in DEFAULT_YARA_CATEGORIES:
                newInspectionPolicy.rules.add(rule)
    newInspectionPolicy.save()
    newInspectionPolicy.try_compile()
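
Running /usr/local/bin/yara with the rule file passed as both the ruleset and the scan target is a cheap validity check: yara exits non-zero when the file fails to compile, which the CalledProcessError branch silently drops. An equivalent check with the yara-python binding, assuming that module is available, would be:

import yara

def rule_file_compiles(fullpath):
    """ Same validation as the subprocess call, via yara-python """
    try:
        yara.compile(filepath=fullpath)
        return True
    except yara.SyntaxError:
        return False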