Example #1
def check_output_directory(output_dir):
    """ Check that the output directory exists, creating it if it doesn't. """
    if not os.path.exists(output_dir):
        logger.info("Creating directory %s." % (output_dir))
        try:
            os.makedirs(output_dir, mode=0o770)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to create directory %s: %s" % (output_dir, err))
Example #2
def update_sources():
    global local_index_filename
    local_index_filename = sources.get_index_filename()
    initial_content = get_initial_content()
    with io.BytesIO() as fileobj:
        url = sources.get_source_index_url()
        logger.info("Downloading %s", url)
        try:
            net.get(url, fileobj)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (url, err))
        if not os.path.exists(config.get_cache_dir()):
            try:
                os.makedirs(config.get_cache_dir())
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             config.get_cache_dir(), err)
                return 1
        write_and_compare(initial_content=initial_content, fileobj=fileobj)
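
The get_initial_content() and write_and_compare() helpers are not shown. A hypothetical sketch of the compare-before-write behavior their names and call sites suggest (signatures and logic are assumptions, not the project's actual implementation):

import logging
import os

logger = logging.getLogger("suricata.update")

local_index_filename = None  # module global set at the top of update_sources()

def get_initial_content():
    """Return the bytes of the currently saved index, or None if none exists yet."""
    if local_index_filename is None or not os.path.exists(local_index_filename):
        return None
    with open(local_index_filename, "rb") as infile:
        return infile.read()

def write_and_compare(initial_content, fileobj):
    """Write the freshly downloaded index only if it differs from the saved copy."""
    new_content = fileobj.getvalue()
    if initial_content is not None and initial_content == new_content:
        logger.info("Source index is unchanged, not writing %s", local_index_filename)
        return
    with open(local_index_filename, "wb") as outfile:
        outfile.write(new_content)
    logger.info("Saved %s", local_index_filename)
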
Example #3
def load_filters(filename):

    filters = []

    with open(filename) as fileobj:
        for line in fileobj:
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            line = line.rsplit(" #")[0]

            line = re.sub(r'\\\$', '$', line)  # needed to escape $ in pp
            try:
                rule_filter = matchers_mod.ModifyRuleFilter.parse(line)
                filters.append(rule_filter)
            except Exception as err:
                raise exceptions.ApplicationError(
                    "Failed to parse modify filter: {}: {}".format(line, err))

    return filters
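
The cleanup steps in the loop above (skip blanks and comment lines, strip a trailing " #" comment, turn a backslash-escaped $ back into a plain $) can be exercised on their own; a small standalone sketch, leaving out the ModifyRuleFilter parsing itself since that is a suricata-update internal:

import io
import re

def iter_filter_lines(fileobj):
    """Yield cleaned filter lines: skip blanks and comment lines, drop a
    trailing ' #' comment, and unescape a backslash-escaped dollar sign."""
    for line in fileobj:
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        line = line.rsplit(" #")[0]
        line = re.sub(r'\\\$', '$', line)
        yield line

# Each yielded line would then be handed to matchers_mod.ModifyRuleFilter.parse().
sample = io.StringIO("# a comment\n\nre:foo bar # trailing note\n")
print(list(iter_filter_lines(sample)))  # -> ['re:foo bar']
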
Example #4
def update_sources():
    local_index_filename = sources.get_index_filename()
    with io.BytesIO() as fileobj:
        url = sources.get_source_index_url()
        logger.info("Downloading %s", url)
        try:
            net.get(url, fileobj)
        except Exception as err:
            raise exceptions.ApplicationError(
                "Failed to download index: %s: %s" % (url, err))
        if not os.path.exists(config.get_cache_dir()):
            try:
                os.makedirs(config.get_cache_dir())
            except Exception as err:
                logger.error("Failed to create directory %s: %s",
                             config.get_cache_dir(), err)
                return 1
        with open(local_index_filename, "wb") as outobj:
            outobj.write(fileobj.getvalue())
        logger.info("Saved %s", local_index_filename)
Example #5
def load_sources(suricata_version):
    files = {}

    urls = []

    http_header = None
    checksum = True

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append((url, http_header, checksum))

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = sources.get_index_filename()
        if not os.path.exists(index_filename):
            logger.warning("No index exists, will use bundled index.")
            logger.warning("Please run suricata-update update-sources.")
        if os.path.exists(index_filename) and time.time() - \
                os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME:
            logger.warning(
                "Source index is older than 2 weeks. "
                "Please update with suricata-update update-sources.")
        index = sources.Index(index_filename)

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                http_header = source.get("http_header")
                checksum = source.get("checksum")
                url = (source["url"] % params, http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url[0])
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                source_config = index.get_source_by_name(name)
                try:
                    checksum = source_config["checksum"]
                except:
                    checksum = True
                url = (index.resolve_url(name, params), http_header, checksum)
                logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = (url % internal_params, http_header, checksum)
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure it's added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not config.args().offline and not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(
            (sources.get_etopen_url(internal_params), http_header, checksum))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    if config.get("local") is not None:
        for local in config.get("local"):
            load_local(local, files)

    return files
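
INDEX_EXPIRATION_TIME in the staleness check above is defined elsewhere in the module; a self-contained sketch of that check, with the constant's value assumed from the two-week warning text:

import os
import time

INDEX_EXPIRATION_TIME = 60 * 60 * 24 * 14  # two weeks (assumed from the warning above)

def index_is_stale(index_filename):
    """True if the index file exists but was last modified more than two weeks ago."""
    return (os.path.exists(index_filename)
            and time.time() - os.stat(index_filename).st_mtime > INDEX_EXPIRATION_TIME)
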
Example #6
def load_sources(suricata_version):
    files = {}

    urls = []

    # Add any URLs added with the --url command line parameter.
    if config.args().url:
        for url in config.args().url:
            urls.append(url)

    # Get the new style sources.
    enabled_sources = sources.get_enabled_sources()

    # Convert the Suricata version to a version string.
    version_string = "%d.%d.%d" % (
        suricata_version.major, suricata_version.minor, suricata_version.patch)

    # Construct the URL replacement parameters that are internal to
    # suricata-update.
    internal_params = {"__version__": version_string}

    # If we have new sources, we also need to load the index.
    if enabled_sources:
        index_filename = os.path.join(config.get_cache_dir(),
                                      sources.SOURCE_INDEX_FILENAME)
        if os.path.exists(index_filename):
            index = sources.Index(index_filename)
        else:
            index = None

        for (name, source) in enabled_sources.items():
            params = source["params"] if "params" in source else {}
            params.update(internal_params)
            if "url" in source:
                # No need to go off to the index.
                url = source["url"] % params
            else:
                if not index:
                    raise exceptions.ApplicationError(
                        "Source index is required for source %s; "
                        "run suricata-update update-sources" %
                        (source["source"]))
                url = index.resolve_url(name, params)
            logger.debug("Resolved source %s to URL %s.", name, url)
            urls.append(url)

    if config.get("sources"):
        for url in config.get("sources"):
            if not isinstance(url, str):
                raise exceptions.InvalidConfigurationError(
                    "Invalid datatype for source URL: %s" % (str(url)))
            url = url % internal_params
            logger.debug("Adding source %s.", url)
            urls.append(url)

    # If --etopen is on the command line, make sure it's added. Or if
    # there are no URLs, default to ET/Open.
    if config.get("etopen") or not urls:
        if not urls:
            logger.info(
                "No sources configured, will use Emerging Threats Open")
        urls.append(sources.get_etopen_url(internal_params))

    # Converting the URLs to a set removes dupes.
    urls = set(urls)

    # Now download each URL.
    for url in urls:
        Fetch().run(url, files)

    # Now load local rules.
    for local in config.get("local"):
        load_local(local, files)

    return files
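
Both versions of load_sources() expand source URLs with Python %-formatting against the internal parameters; a tiny illustration of that templating (placeholder URL, not a real source):

internal_params = {"__version__": "7.0.3"}
url_template = "https://example.com/rules/suricata-%(__version__)s/rules.tar.gz"
print(url_template % internal_params)
# -> https://example.com/rules/suricata-7.0.3/rules.tar.gz
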