Example #1
def get_logger(component_name, level=logging.INFO, stdout=True) -> logging.Logger:
    """Get a pre-configured logging instance

    Args:
        component_name: The name of the service doing the logging.
        level: The minimum logging level
        stdout: If True, prints to console

    Returns: A logger instance
    """
    coloredlogs.DEFAULT_FIELD_STYLES = {'asctime': {'color': 'green'}, 'hostname': {'color': 'magenta'},
                                        'levelname': {'bold': True, 'color': 'black'},
                                        'name': {'color': 'cyan', 'bold': True},
                                        'programname': {'color': 'blue'}, 'username': {'color': 'yellow'}}

    utilities.makedirs(const.LOG_PATH, exist_ok=True)

    logger = logging.getLogger(component_name.upper())
    logger.setLevel(level)
    if not len(logger.handlers):
        fh = logging.FileHandler(os.path.join(const.LOG_PATH, 'dynamite-{}.log'.format(TODAY_FORMATTED_DATE)))
        fformatter = logging.Formatter(
            '%(asctime)s | %(name)20s | %(module)20s | %(funcName)45s | %(lineno)4s | %(levelname)8s |  %(message)s')
        fh.setFormatter(fformatter)
        logger.addHandler(fh)
    if stdout:
        coloredlogs.install(level=level, logger=logger,
                            fmt='%(asctime)s %(name)-25s %(levelname)-10s | %(message)s')
    logger.propagate = False
    return logger
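
A minimal usage sketch for the logger factory above; the component name and messages are invented for illustration, everything else comes from the snippet itself:

logger = get_logger('elasticsearch', level=logging.DEBUG, stdout=True)
logger.info('Starting ElasticSearch install.')    # colorized console output plus the daily dynamite-<date>.log file
logger.debug('Verbose detail, emitted because level=DEBUG')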
Example #2
    def invoke(self, shell: Optional[bool] = False, cwd: Optional[str] = None) -> \
            List[Tuple[List, bytes, bytes]]:
        utilities.makedirs(f'{self.cert_directory}')
        if not cwd:
            cwd = self.cert_directory
        results = super().invoke(shell=shell, cwd=cwd)
        utilities.safely_remove_file(f'{self.cert_directory}/admin-key-temp.pem')
        utilities.safely_remove_file(f'{self.cert_directory}/admin.csr')
        utilities.set_ownership_of_file(path=self.cert_directory, user='******', group='dynamite')
        utilities.set_permissions_of_file(file_path=self.cert_directory, unix_permissions_integer=700)
        utilities.set_permissions_of_file(file_path=f'{self.cert_directory}/{self.cert_name}',
                                          unix_permissions_integer=600)
        utilities.set_permissions_of_file(file_path=f'{self.cert_directory}/{self.key_name}',
                                          unix_permissions_integer=600)
        utilities.set_permissions_of_file(file_path=f'{self.cert_directory}/{self.trusted_ca_cert_name}',
                                          unix_permissions_integer=600)
        utilities.set_permissions_of_file(file_path=f'{self.cert_directory}/{self.trusted_ca_key_name}',
                                          unix_permissions_integer=600)
        es_main_config = config.ConfigManager(self.configuration_directory)
        es_main_config.transport_pem_cert_file = f'security/auth/{self.cert_name}'
        es_main_config.rest_api_pem_cert_file = es_main_config.transport_pem_cert_file

        es_main_config.transport_pem_key_file = f'security/auth/{self.key_name}'
        es_main_config.rest_api_pem_key_file = es_main_config.transport_pem_key_file

        es_main_config.transport_trusted_cas_file = f'security/auth/{self.trusted_ca_cert_name}'
        es_main_config.rest_api_trusted_cas_file = es_main_config.transport_trusted_cas_file
        es_main_config.commit()
        return results
Example #3
    def install_default_mirrors_and_configurations(
            configurations_directory: Optional[
                str] = f'{const.INSTALL_CACHE}/configurations/',
            dynamite_config_root: Optional[str] = const.CONFIG_PATH,
            delta_set_name: Optional[str] = None):
        base_root_dir = f'{configurations_directory}/base/'
        for base_root, base_dirs, base_files in os.walk(base_root_dir):
            relative_directory = base_root.replace(base_root_dir, '')
            utilities.makedirs(f'{dynamite_config_root}/{relative_directory}')
            for base_file in base_files:
                relative_file = os.path.join(relative_directory, base_file)
                base_file_path = os.path.join(base_root_dir, relative_file)
                destination_file = f'{dynamite_config_root}/{relative_file}'
                shutil.copy2(base_file_path, destination_file)

        overwrite_root_dir = f'{configurations_directory}/deltas/{delta_set_name}/'
        if os.path.exists(overwrite_root_dir):
            for _type, action, overwrite_relative_path in get_deltas(
                    delta_set_name, configurations_directory):
                if _type == 'directory':
                    if action == 'create':
                        create_dir_path = f'{dynamite_config_root}/{overwrite_relative_path}'
                        utilities.makedirs(create_dir_path)
                elif _type == 'file':
                    if action in ['create', 'overwrite']:
                        source_create_overwrite_file_path = f'{overwrite_root_dir}/{overwrite_relative_path}'
                        dest_create_overwrite_file_path = f'{dynamite_config_root}/{overwrite_relative_path}'
                        shutil.copy2(source_create_overwrite_file_path,
                                     dest_create_overwrite_file_path)
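
The base-then-delta layering above can be exercised on its own. The sketch below reproduces the same copy order with temporary directories and plain os/shutil calls; it skips the get_deltas bookkeeping and all file names are made up for illustration:

import os
import shutil
import tempfile

def overlay(src_root, dst_root):
    """Mirror src_root into dst_root, overwriting files that already exist."""
    for root, _dirs, files in os.walk(src_root):
        relative = os.path.relpath(root, src_root)
        os.makedirs(os.path.join(dst_root, relative), exist_ok=True)
        for name in files:
            shutil.copy2(os.path.join(root, name), os.path.join(dst_root, relative, name))

base_dir, delta_dir, config_root = (tempfile.mkdtemp() for _ in range(3))
with open(os.path.join(base_dir, 'suricata.yaml'), 'w') as f:
    f.write('base\n')
with open(os.path.join(delta_dir, 'suricata.yaml'), 'w') as f:
    f.write('delta\n')

overlay(base_dir, config_root)    # 1) copy the base configuration set
overlay(delta_dir, config_root)   # 2) apply the delta set on top, overwriting collisions
print(open(os.path.join(config_root, 'suricata.yaml')).read())   # -> delta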
Example #4
    def optimize(self) -> None:
        """Runs Kibana webpack optimizer among other things.

        Returns:
            None
        """
        environ = utilities.get_environment_file_dict()
        if not os.path.exists(PID_DIRECTORY):
            utilities.makedirs(PID_DIRECTORY)
        utilities.set_ownership_of_file(PID_DIRECTORY,
                                        user='******',
                                        group='dynamite')
        self.logger.info('Optimizing Kibana Libraries.')
        # Kibana initially has to be called as root due to a process forking issue when using runuser
        # builtin
        subprocess.call('{}/bin/kibana --optimize --allow-root'.format(
            environ['KIBANA_HOME'], ),
                        shell=True,
                        env=utilities.get_environment_file_dict(),
                        stderr=subprocess.PIPE,
                        stdout=subprocess.PIPE)
        # Pass permissions back to dynamite user
        utilities.set_ownership_of_file(environ['KIBANA_LOGS'],
                                        user='******',
                                        group='dynamite')
        utilities.set_ownership_of_file(environ['KIBANA_HOME'],
                                        user='******',
                                        group='dynamite')
Example #5
    def from_raw_text(cls,
                      raw_text,
                      install_directory=None,
                      backup_configuration_directory=None):
        """
        Alternative method for creating configuration file from raw text

        :param raw_text: The string representing the configuration file
        :param install_directory: The installation directory for Zeek
        :param backup_configuration_directory: The backup configuration directory

        :return: An instance of LocalNetworkConfigManager
        """
        tmp_dir = '/tmp/dynamite/temp_configs/'
        tmp_config = os.path.join(tmp_dir, 'etc', 'networks.cfg')
        utilities.makedirs(os.path.join(tmp_dir, 'etc'))
        with open(tmp_config, 'w') as out_f:
            out_f.write(raw_text)
        c = cls(install_directory=tmp_dir,
                backup_configuration_directory=backup_configuration_directory)
        if install_directory:
            c.install_directory = install_directory
        if backup_configuration_directory:
            c.backup_configuration_directory = backup_configuration_directory
        return c
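
A brief usage sketch for the method above (the decorator is not shown in the listing, but the cls parameter implies @classmethod); the raw text and target path below are invented:

raw = '10.0.0.0/8\tPrivate IP space\n192.168.0.0/16\tPrivate IP space\n'
manager = LocalNetworkConfigManager.from_raw_text(raw, install_directory='/opt/dynamite/zeek')
# The instance is parsed from the temporary copy under /tmp/dynamite/temp_configs/,
# while manager.install_directory points at the directory passed in.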
Example #6
 def setup_oinkmaster(self):
     env_file = os.path.join(const.CONFIG_PATH, 'environment')
     self.logger.info("Installing Oinkmaster.")
     try:
         utilities.makedirs(self.install_directory, exist_ok=True)
     except Exception as e:
         self.logger.error("Failed to create required directory structure.")
         self.logger.debug("Failed to create required directory structure; {}".format(e))
         raise oinkmaster_exceptions.InstallOinkmasterError(
             "Failed to create required directory structure; {}".format(e))
     self.logger.info("Copying oinkmaster files.")
     try:
         utilities.copytree(os.path.join(const.INSTALL_CACHE, const.OINKMASTER_DIRECTORY_NAME),
                            self.install_directory)
     except Exception as e:
         self.logger.error(
             'Failed to copy {} -> {}.'.format(os.path.join(const.INSTALL_CACHE, const.OINKMASTER_DIRECTORY_NAME),
                                               self.install_directory))
         self.logger.debug('Failed to copy {} -> {}: {}'.format(
             os.path.join(const.INSTALL_CACHE, const.OINKMASTER_DIRECTORY_NAME), self.install_directory, e))
         raise oinkmaster_exceptions.InstallOinkmasterError(
             "General error while copying Oinkmaster from install cache; {}".format(e))
     with open(env_file) as env_f:
         env_file_contents = env_f.read()
     if 'OINKMASTER_HOME' not in env_file_contents:
         self.logger.info('Updating Oinkmaster default home path [{}]'.format(self.install_directory))
         subprocess.call('echo OINKMASTER_HOME="{}" >> {}'.format(self.install_directory, env_file),
                         shell=True)
     self.logger.info('PATCHING oinkmaster.conf with emerging-threats URL.')
     try:
         with open(os.path.join(self.install_directory, 'oinkmaster.conf'), 'a') as f:
             f.write('\nurl = http://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz')
     except Exception as e:
         self.logger.error('Failed to update oinkmaster.conf.')
         self.logger.debug('Failed to update oinkmaster.conf: {}.\n'.format(e))
         raise oinkmaster_exceptions.InstallOinkmasterError(
             "Failed to update oinkmaster configuration file; {}".format(e))
Example #7
 def setup(self):
     utilities.makedirs(self.install_directory)
     utilities.makedirs(f'{self.install_directory}/{self.directory_name}')
     self.copy_java_files_and_directories()
     self.create_update_java_environment_variables()
     utilities.set_ownership_of_file(f'{self.install_directory}/{self.directory_name}', user='******',
                                     group='dynamite')
Example #8
    def from_raw_text(cls,
                      raw_text,
                      configuration_directory=None,
                      backup_configuration_directory=None):
        """
        Alternative method for creating configuration file from raw text

        :param raw_text: The string representing the configuration file
        :param configuration_directory: The configuration directory for Zeek
        :param backup_configuration_directory: The backup configuration directory

        :return: An instance of NodeConfigManager
        """
        tmp_dir = '/tmp/dynamite/temp_configs/'
        tmp_config = os.path.join(tmp_dir, 'site', 'local.zeek')
        utilities.makedirs(os.path.join(tmp_dir, 'site'))
        with open(tmp_config, 'w') as out_f:
            out_f.write(raw_text)
        c = cls(configuration_directory=tmp_dir,
                backup_configuration_directory=backup_configuration_directory)
        if configuration_directory:
            c.configuration_directory = configuration_directory
        if backup_configuration_directory:
            c.backup_configuration_directory = backup_configuration_directory
        return c
Example #9
def update_default_configurations():
    """
    Retrieves the latest skeleton configurations for setting up ElasticSearch, LogStash, Zeek, and Suricata
    """

    shutil.rmtree(const.INSTALL_CACHE, ignore_errors=True)
    makedirs(const.DEFAULT_CONFIGS, exist_ok=True)
    try:
        download_file(const.DEFAULT_CONFIGS_URL,
                      const.DEFAULT_CONFIGS_ARCHIVE_NAME,
                      stdout=True)
    except Exception as e:
        raise exceptions.DownloadError(
            "General error occurred while downloading archive: {}; {}".format(
                os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz'),
                e))
    shutil.rmtree(const.DEFAULT_CONFIGS, ignore_errors=True)
    time.sleep(1)
    try:
        extract_archive(
            os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz'),
            const.CONFIG_PATH)
    except IOError as e:
        raise exceptions.ArchiveExtractionError(
            "General error occurred while extracting archive: {}; {}".format(
                os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz'),
                e))
Example #10
 def setup(self):
     utilities.makedirs(self.install_directory)
     self.create_update_oinkmaster_environment_variables()
     utilities.copytree(f'{const.INSTALL_CACHE}/{self.local_mirror_root}',
                        self.install_directory)
     with open(os.path.join(self.install_directory, 'oinkmaster.conf'),
               'a') as f:
         f.write(f'\nurl = {const.EMERGING_THREATS_OPEN}')
Example #11
 def _copy_elasticsearch_files_and_directories(self):
     config_paths = [
         'config/elasticsearch.yml', 'config/jvm.options',
         'config/log4j2.properties'
     ]
     install_paths = ['bin/', 'lib/', 'logs/', 'modules/', 'plugins/']
     path = None
     try:
         for path in config_paths:
             self.logger.debug('Copying {} -> {}'.format(
                 os.path.join(
                     const.INSTALL_CACHE,
                     '{}/{}'.format(const.ELASTICSEARCH_DIRECTORY_NAME,
                                    path)), self.configuration_directory))
             try:
                 shutil.copy(
                     os.path.join(
                         const.INSTALL_CACHE,
                         '{}/{}'.format(const.ELASTICSEARCH_DIRECTORY_NAME,
                                        path)),
                     self.configuration_directory)
             except shutil.Error:
                 self.logger.warning(
                     '{} already exists at this path.'.format(path))
     except Exception as e:
         self.logger.error(
             "General error while attempting to copy {} to {}.".format(
                 path, self.configuration_directory))
         self.logger.debug(
             "General error while attempting to copy {} to {}; {}".format(
                 path, self.configuration_directory, e))
         raise elastic_exceptions.InstallElasticsearchError(
             "General error while attempting to copy {} to {}; {}".format(
                 path, self.configuration_directory, e))
     try:
         for path in install_paths:
             src_install_path = os.path.join(
                 const.INSTALL_CACHE, const.ELASTICSEARCH_DIRECTORY_NAME,
                 path)
             dst_install_path = os.path.join(self.install_directory, path)
             self.logger.debug('Copying {} -> {}'.format(
                 src_install_path, dst_install_path))
             try:
                 utilities.makedirs(dst_install_path, exist_ok=True)
                 utilities.copytree(src_install_path, dst_install_path)
             except shutil.Error:
                 self.logger.warning(
                     '{} already exists at this path.'.format(path))
     except Exception as e:
         self.logger.error(
             "General error while attempting to copy {} to {}.".format(
                 path, self.install_directory))
         self.logger.debug(
             "General error while attempting to copy {} to {}; {}".format(
                 path, self.install_directory, e))
         raise elastic_exceptions.InstallElasticsearchError(
             "General error while attempting to copy {} to {}; {}".format(
                 path, self.install_directory, e))
Example #12
    def write_jvm_config(self):
        """
        Overwrites the JVM initial/max memory if settings were updated
        """
        new_output = ''
        jvm_options_path = os.path.join(self.configuration_directory,
                                        'jvm.options')
        try:
            with open(jvm_options_path) as config_f:
                for line in config_f.readlines():
                    if not line.startswith('#') and '-Xms' in line:
                        new_output += '-Xms' + str(
                            self.java_initial_memory) + 'g'
                    elif not line.startswith('#') and '-Xmx' in line:
                        new_output += '-Xmx' + str(
                            self.java_maximum_memory) + 'g'
                    else:
                        # strip the original newline; a single newline is re-added below for every line
                        new_output += line.rstrip('\n')
                    new_output += '\n'
        except IOError:
            raise general_exceptions.ReadJavaConfigError(
                "Could not locate {}".format(jvm_options_path))
        except Exception as e:
            raise general_exceptions.ReadJavaConfigError(
                "General Exception when opening/parsing environment config at {}; {}"
                .format(self.configuration_directory, e))

        backup_configurations = os.path.join(self.configuration_directory,
                                             'config_backups/')
        java_config_backup = os.path.join(
            backup_configurations,
            'jvm.options.backup.{}'.format(int(time.time())))
        try:
            utilities.makedirs(backup_configurations, exist_ok=True)
        except Exception as e:
            raise general_exceptions.WriteJavaConfigError(
                "General error while attempting to create backup directory at {}; {}"
                .format(backup_configurations, e))
        try:
            shutil.copy(
                os.path.join(self.configuration_directory, 'jvm.options'),
                java_config_backup)
        except Exception as e:
            raise general_exceptions.WriteJavaConfigError(
                "General error while attempting to copy old jvm.options file to {}; {}"
                .format(backup_configurations, e))
        try:
            with open(
                    os.path.join(self.configuration_directory, 'jvm.options'),
                    'w') as config_f:
                config_f.write(new_output)
        except IOError:
            raise general_exceptions.WriteJavaConfigError(
                "Could not locate {}".format(self.configuration_directory))
        except Exception as e:
            raise general_exceptions.WriteJavaConfigError(
                "General error while attempting to write new jvm.options file to {}; {}"
                .format(self.configuration_directory, e))
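
The heap-size rewrite in the middle of write_jvm_config can be tried in isolation; the sketch below applies the same -Xms/-Xmx substitution to an in-memory jvm.options fragment (the memory sizes are placeholders):

java_initial_memory = 4   # GB, placeholder
java_maximum_memory = 4   # GB, placeholder
jvm_options = '-Xms1g\n-Xmx1g\n## G1GC Configuration\n-XX:+UseG1GC\n'

new_output = ''
for line in jvm_options.splitlines(keepends=True):
    if not line.startswith('#') and '-Xms' in line:
        new_output += '-Xms' + str(java_initial_memory) + 'g\n'
    elif not line.startswith('#') and '-Xmx' in line:
        new_output += '-Xmx' + str(java_maximum_memory) + 'g\n'
    else:
        new_output += line

print(new_output)   # -Xms4g / -Xmx4g, with the comment and GC flag left untouched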
Example #13
def logstash_test_dir(request):
    testdir = '/tmp/dynamite/logstash_test'
    makedirs(testdir)
    def teardown():
        if os.path.exists(testdir) and os.path.isdir(testdir):
            print("tearing down logstash test dir")
            shutil.rmtree(testdir)
    request.addfinalizer(teardown)

    return testdir
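
The function above is a pytest fixture (the @pytest.fixture decorator is not shown in the listing, but the request argument and addfinalizer call imply it). A hypothetical test consuming it:

def test_logstash_test_dir_exists(logstash_test_dir):
    # pytest injects the prepared directory and runs the registered teardown afterwards
    assert os.path.isdir(logstash_test_dir)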
Example #14
    def setUp(self):
        self.config_root = '/etc/dynamite/test'
        self.config_directory = os.path.join(self.config_root, 'logstash')

        # Setup Test Space
        utilities.makedirs(self.config_directory, exist_ok=True)
        create_dummy_logstashyaml(self.config_directory)
        create_dummy_javaopts(self.config_directory)

        self.config_manager = config.ConfigManager(
            configuration_directory=self.config_directory)
Example #15
    def write_config(self):
        """
        Overwrite the existing suricata.yaml config with changed values
        """
        def update_dict_from_path(path, value):
            """
            :param path: A tuple representing each level of a nested path in the yaml document
                        ('vars', 'address-groups', 'HOME_NET') = /vars/address-groups/HOME_NET
            :param value: The new value
            :return: None
            """
            partial_config_data = self.config_data
            for i in range(0, len(path) - 1):
                partial_config_data = partial_config_data[path[i]]
            partial_config_data.update({path[-1]: value})

        timestamp = int(time.time())
        backup_configurations = os.path.join(self.configuration_directory,
                                             'config_backups/')
        suricata_config_backup = os.path.join(
            backup_configurations, 'suricata.yaml.backup.{}'.format(timestamp))
        try:
            utilities.makedirs(backup_configurations, exist_ok=True)
        except Exception as e:
            raise suricata_exceptions.WriteSuricataConfigError(
                "General error while attempting to create backup directory at {}; {}"
                .format(backup_configurations, e))
        try:
            shutil.copy(
                os.path.join(self.configuration_directory, 'suricata.yaml'),
                suricata_config_backup)
        except Exception as e:
            raise suricata_exceptions.WriteSuricataConfigError(
                "General error while attempting to copy old suricata.yaml file to {}; {}"
                .format(backup_configurations, e))

        for k, v in vars(self).items():
            if k not in self.tokens:
                continue
            token_path = self.tokens[k]
            update_dict_from_path(token_path, v)
        try:
            with open(
                    os.path.join(self.configuration_directory,
                                 'suricata.yaml'), 'w') as configyaml:
                configyaml.write('%YAML 1.1\n---\n\n')
                dump(self.config_data, configyaml, default_flow_style=False)
        except IOError:
            raise suricata_exceptions.WriteSuricataConfigError(
                "Could not locate {}".format(self.configuration_directory))
        except Exception as e:
            raise suricata_exceptions.WriteSuricataConfigError(
                "General error while attempting to write new suricata.yaml file to {}; {}"
                .format(self.configuration_directory, e))
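
The nested-update helper inside write_config is easy to verify against a plain dictionary; a small standalone sketch with invented sample data:

config_data = {'vars': {'address-groups': {'HOME_NET': '[192.168.0.0/16]'}}}

def update_dict_from_path(path, value):
    partial = config_data
    for key in path[:-1]:
        partial = partial[key]
    partial.update({path[-1]: value})

update_dict_from_path(('vars', 'address-groups', 'HOME_NET'), '[10.0.0.0/8]')
print(config_data['vars']['address-groups']['HOME_NET'])   # -> [10.0.0.0/8]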
Example #16
    def setup(self, inspect_interfaces: List[str]):
        """Setup Zeek
        Args:
            inspect_interfaces: A list of network interfaces to capture on (e.g. ["mon0", "mon1"])
        Returns:
            None
        """
        if not self.skip_interface_validation:
            if not self.validate_inspect_interfaces(inspect_interfaces):
                raise install.NetworkInterfaceNotFound(inspect_interfaces)
        sysctl = systemctl.SystemCtl()
        self.install_zeek_dependencies()
        self.create_update_zeek_environment_variables()
        self.logger.debug(f'Creating directory: {self.configuration_directory}')
        utilities.makedirs(self.configuration_directory)
        self.logger.debug(f'Creating directory: {self.install_directory}')
        utilities.makedirs(self.install_directory)
        self.logger.info('Setting up Zeek from source. This can take up to 15 minutes.')
        if self.stdout:
            utilities.print_coffee_art()
        self.configure_compile_zeek()
        self.logger.info('Setting up Zeek package manager.')
        zkg_installer = zkg_install.InstallManager()
        zkg_installer.setup()
        package.InstallPackageManager(const.ZEEK_PACKAGES, stdout=self.stdout, verbose=self.verbose).setup()

        self.copy_file_or_directory_to_destination(f'{const.DEFAULT_CONFIGS}/zeek/broctl-nodes.cfg',
                                                   f'{self.install_directory}/etc/node.cfg')
        self.copy_file_or_directory_to_destination(f'{const.DEFAULT_CONFIGS}/zeek/local.zeek',
                                                   f'{self.configuration_directory}/site/local.zeek')

        # Optimize Configurations
        site_local_config = config.SiteLocalConfigManager(self.configuration_directory, stdout=self.stdout,
                                                          verbose=self.verbose)
        node_config = config.NodeConfigManager(self.install_directory, stdout=self.stdout, verbose=self.verbose)
        node_config.workers = node.Workers()
        for worker in node_config.get_optimal_zeek_worker_config(inspect_interfaces):
            node_config.workers.add_worker(
                worker=worker
            )
        self.logger.info('Applying node configuration.')
        node_config.commit()

        # Fix Permissions
        self.logger.info('Setting up file permissions.')
        utilities.set_ownership_of_file(self.configuration_directory, user='******', group='dynamite')
        utilities.set_ownership_of_file(self.install_directory, user='******', group='dynamite')

        self.logger.info(f'Installing service -> {const.DEFAULT_CONFIGS}/systemd/zeek.service')
        sysctl.install_and_enable(os.path.join(const.DEFAULT_CONFIGS, 'systemd', 'zeek.service'))
Example #17
    def analyze(cls, pcap_path, name=None, description=None, keep_pcap=True):
        """
        Given a PCAP path on disk; analyze that pcap with Zeek storing the results in a directory
        deterministically identified by hashing the pcap file.

        :param pcap_path: The path to the pcap file on disk
        :param name: The name of the pcap (short descriptor)
        :param description: A long description for the pcap
        :param keep_pcap: If True, we'll save a copy of the pcap to disk after analysis
        :return: A ZeekReplay instance
        """
        if name:
            name = re.sub("[^0-9a-zA-Z]+", "", name)[0:64]
        if description:
            description = description[0:1024]
        environment_variables = utilities.get_environment_file_dict()
        install_directory = environment_variables.get('ZEEK_HOME')
        scripts_directory = environment_variables.get('ZEEK_SCRIPTS')
        pcap_replay_id = utilities.get_filepath_md5_hash(pcap_path)
        replay_session = os.path.join(REPLAY_ROOT, str(pcap_replay_id))
        utilities.makedirs(replay_session)
        zeek_bin_path = os.path.join(install_directory, 'bin', 'zeek')
        zeek_scripts_config = config.ScriptConfigManager(scripts_directory)
        command = 'cd {}; {} -r {} {} -C'.format(
            replay_session, zeek_bin_path, pcap_path,
            ' '.join(zeek_scripts_config.list_enabled_scripts()))
        child = Popen(command,
                      shell=True,
                      stdin=PIPE,
                      stdout=PIPE,
                      stderr=PIPE,
                      close_fds=True)
        child.communicate()

        # Write our metadata to disk
        with open(os.path.join(replay_session, '.metadata'), 'a') as meta_f:
            meta_f.write(
                json.dumps({
                    'time': time.time(),
                    'name': name,
                    'description': description
                }) + '\n')

        # Copy over the pcap if we want to keep it.
        if keep_pcap:
            shutil.copy(pcap_path,
                        os.path.join(replay_session, pcap_replay_id + '.pcap'))

        return cls(pcap_replay_id)
Example #18
    def setup_jupyterhub(self):
        """
        Sets up jupyterhub configuration; and creates required user for initial login
        """

        env_file = os.path.join(const.CONFIG_PATH, 'environment')
        self.logger.info("Creating lab directories and files.")
        source_config = os.path.join(const.DEFAULT_CONFIGS, 'dynamite_lab',
                                     'jupyterhub_config.py')
        try:
            utilities.makedirs(self.configuration_directory, exist_ok=True)
        except Exception as e:
            self.logger.error(
                "General error occurred while attempting to create root directories."
            )
            self.logger.debug(
                "General error occurred while attempting to create root directories; {}"
                .format(e))
            raise lab_exceptions.InstallLabError(
                "General error occurred while attempting to create root directories; {}"
                .format(e))
        try:
            with open(env_file) as env_f:
                if 'DYNAMITE_LAB_CONFIG' not in env_f.read():
                    self.logger.info(
                        'Updating Dynamite Lab Config path [{}]'.format(
                            self.configuration_directory))
                    subprocess.call(
                        'echo DYNAMITE_LAB_CONFIG="{}" >> {}'.format(
                            self.configuration_directory, env_file),
                        shell=True)
        except IOError:
            self.logger.error(
                "Failed to open {} for reading.".format(env_file))
            raise lab_exceptions.InstallLabError(
                "Failed to open {} for reading.".format(env_file))
        except Exception as e:
            self.logger.error(
                "General error while creating environment variables in {}.".
                format(env_file))
            self.logger.debug(
                "General error while creating environment variables in {}; {}".
                format(env_file, e))
            raise lab_exceptions.InstallLabError(
                "General error while creating environment variables in {}; {}".
                format(env_file, e))
        shutil.copy(source_config, self.configuration_directory)
        self._link_jupyterhub_binaries()
Example #19
 def write_config(self):
     """
     Overwrite the existing local.zeek config with changed values
     """
     timestamp = int(time.time())
     output_str = ''
     backup_configurations = os.path.join(self.configuration_directory,
                                          'config_backups/')
     zeek_config_backup = os.path.join(
         backup_configurations, 'local.zeek.backup.{}'.format(timestamp))
     try:
         utilities.makedirs(backup_configurations, exist_ok=True)
     except Exception as e:
         raise zeek_exceptions.WriteZeekConfigError(
             "General error while attempting to create backup directory at {}; {}"
             .format(backup_configurations, e))
     for e_script in self.list_enabled_scripts():
         output_str += '@load {}\n'.format(e_script)
     for d_script in self.list_disabled_scripts():
         output_str += '#@load {}\n'.format(d_script)
     for e_sig in self.list_enabled_sigs():
         output_str += '@load-sigs {}\n'.format(e_sig)
     for d_sig in self.list_disabled_sigs():
          output_str += '#@load-sigs {}\n'.format(d_sig)
     for rdef, val in self.list_redefinitions():
         output_str += 'redef {} = {}\n'.format(rdef, val)
     try:
         shutil.copy(
             os.path.join(self.configuration_directory, 'site',
                          'local.zeek'), zeek_config_backup)
     except Exception as e:
         raise zeek_exceptions.WriteZeekConfigError(
             "General error while attempting to copy old local.zeek file to {}; {}"
             .format(backup_configurations, e))
     try:
         with open(
                 os.path.join(self.configuration_directory, 'site',
                              'local.zeek'), 'w') as f:
             f.write(output_str)
     except IOError:
         raise zeek_exceptions.WriteZeekConfigError(
             "Could not locate {}".format(self.configuration_directory))
     except Exception as e:
         raise zeek_exceptions.WriteZeekConfigError(
             "General error while attempting to write new local.zeek file to {}; {}"
              .format(self.configuration_directory, e))
Example #20
def logstash_test_config_yaml(logstash_test_dir):
    confdata = {
        'node.name': 'testing_ls_node',
        'path.data': '/tmp/dynamite/logstash_test/data/',
        'path.logs': '/var/log/dynamite/logstash',
        'pipeline.batch.delay': 50,
        'pipeline.batch.size': 125
    }
    path = f"{logstash_test_dir}/logstash.yml"
    if os.path.exists(path):
        os.remove(path)
    makedirs(logstash_test_dir)
    os.mknod(path)
    with open(path, 'w') as yamlfile:
        yamlfile.write(yaml.dump(confdata))
    return path
Example #21
    def start(self):
        """
        Start the JupyterHub process

        :return: True, if started successfully
        """
        def start_shell_out():
            subprocess.call('jupyterhub -f {} &>/dev/null &'.format(
                os.path.join(self.configuration_directory,
                             'jupyterhub_config.py')),
                            shell=True,
                            stderr=subprocess.PIPE,
                            stdout=None)

        utilities.makedirs(PID_DIRECTORY, exist_ok=True)

        if not utilities.check_pid(self.pid):
            Process(target=start_shell_out).start()
        else:
            self.logger.info(
                'JupyterHub is already running on PID [{}]'.format(self.pid))
            return True
        retry = 0
        self.pid = -1
        time.sleep(5)
        while retry < 6:
            try:
                with open(os.path.join(PID_DIRECTORY, 'jupyterhub.pid')) as f:
                    self.pid = int(f.read())
                start_message = '[Attempt: {}] Starting JupyterHub on PID [{}]'.format(
                    retry + 1, self.pid)
                self.logger.info(start_message)
                if not utilities.check_pid(self.pid):
                    retry += 1
                    time.sleep(5)
                else:
                    return True
            except IOError as e:
                self.logger.warning(
                    "An issue occurred while attempting to start.")
                self.logger.debug(
                    "An issue occurred while attempting to start; {}".format(
                        e))
                retry += 1
                time.sleep(3)
        return False
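
The retry loop above follows a common daemonization pattern: spawn the process, then poll its PID file until the process is confirmed alive or the attempts run out. A stripped-down sketch of just the polling step (the liveness check is a stand-in for utilities.check_pid, and the example path is hypothetical):

import time

def wait_for_pid_file(pid_file, is_alive, attempts=6, delay=5):
    """Poll pid_file until it names a live process; return the PID or None."""
    for _ in range(attempts):
        try:
            with open(pid_file) as f:
                pid = int(f.read())
            if is_alive(pid):
                return pid
        except (IOError, ValueError):
            pass   # file not written yet, or contents not an integer
        time.sleep(delay)
    return None

# e.g. wait_for_pid_file('/var/run/dynamite/jupyterhub.pid', utilities.check_pid)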
Example #22
    def write_elasticsearch_config(self):

        def update_dict_from_path(path, value):
            """
            :param path: A tuple representing each level of a nested path in the yaml document
                        ('vars', 'address-groups', 'HOME_NET') = /vars/address-groups/HOME_NET
            :param value: The new value
            """

            partial_config_data = self.config_data
            for i in range(0, len(path) - 1):
                try:
                    partial_config_data = partial_config_data[path[i]]
                except KeyError:
                    pass
            partial_config_data.update({path[-1]: value})

        timestamp = int(time.time())
        backup_configurations = os.path.join(self.configuration_directory, 'config_backups/')
        elastic_config_backup = os.path.join(backup_configurations, 'elastic.yml.backup.{}'.format(timestamp))
        try:
            utilities.makedirs(backup_configurations, exist_ok=True)
        except Exception as e:
            raise elastic_exceptions.WriteElasticConfigError(
                "General error while attempting to create backup directory at {}; {}".format(backup_configurations, e))
        try:
            shutil.copy(os.path.join(self.configuration_directory, 'elasticsearch.yml'), elastic_config_backup)
        except Exception as e:
            raise elastic_exceptions.WriteElasticConfigError(
                "General error while attempting to copy old elasticsearch.yml file to {}; {}".format(
                    backup_configurations, e))
        for k, v in vars(self).items():
            if k not in self.tokens:
                continue
            token_path = self.tokens[k]
            update_dict_from_path(token_path, v)
        try:
            with open(os.path.join(self.configuration_directory, 'elasticsearch.yml'), 'w') as configyaml:
                dump(self.config_data, configyaml, default_flow_style=False)
        except IOError:
            raise elastic_exceptions.WriteElasticConfigError("Could not locate {}".format(self.configuration_directory))
        except Exception as e:
            raise elastic_exceptions.WriteElasticConfigError(
                "General error while attempting to write new elasticsearch.yml file to {}; {}".format(
                    self.configuration_directory, e))
Example #23
    def __init__(self, stdout=True, verbose=False):
        log_level = logging.INFO
        if verbose:
            log_level = logging.DEBUG
        self.logger = get_logger('LAB', level=log_level, stdout=stdout)

        self.environment_variables = utilities.get_environment_file_dict()
        self.configuration_directory = self.environment_variables.get(
            'DYNAMITE_LAB_CONFIG')
        utilities.makedirs(PID_DIRECTORY, exist_ok=True)
        utilities.set_ownership_of_file(PID_DIRECTORY,
                                        user='******',
                                        group='dynamite')
        try:
            with open(os.path.join(PID_DIRECTORY, 'jupyterhub.pid')) as pid_f:
                self.pid = int(pid_f.read())
        except (IOError, ValueError):
            self.pid = -1
Example #24
 def _get_pid(pid_file):
     pid = None
     h, t = os.path.split(pid_file)
     utilities.makedirs(h, exist_ok=True)
     try:
         utilities.set_ownership_of_file(h)
     # PID file does not exist
     except IOError:
         pass
     # dynamite user does not exist
     except KeyError:
         pass
     try:
         with open(pid_file) as pid_f:
             pid = int(pid_f.read())
     except (IOError, ValueError):
         pass
     return pid
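
A short usage sketch for the helper above, calling it directly for illustration; the PID file path is hypothetical:

pid = _get_pid('/var/run/dynamite/kibana/kibana.pid')   # hypothetical path
if pid is None:
    print('Service does not appear to be running.')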
Example #25
def get_logger(component_name, level=logging.INFO, stdout=True):
    coloredlogs.DEFAULT_FIELD_STYLES = {
        'asctime': {
            'color': 'green'
        },
        'hostname': {
            'color': 'magenta'
        },
        'levelname': {
            'bold': True,
            'color': 'black'
        },
        'name': {
            'color': 'cyan',
            'bold': True
        },
        'programname': {
            'color': 'blue'
        },
        'username': {
            'color': 'yellow'
        }
    }

    utilities.makedirs(const.LOG_PATH, exist_ok=True)
    today_formatted_date = datetime.strftime(datetime.today(), '%d-%m-%Y')
    logger = logging.getLogger(component_name)
    logger.setLevel(level)
    if not len(logger.handlers):
        fh = logging.FileHandler(
            os.path.join(const.LOG_PATH,
                         'dynamite-{}.log'.format(today_formatted_date)))
        fformatter = logging.Formatter(
            '%(asctime)s | %(name)15s | %(module)20s | %(funcName)45s | %(lineno)4s | %(levelname)8s |  %(message)s'
        )
        fh.setFormatter(fformatter)
        logger.addHandler(fh)
    if stdout:
        coloredlogs.install(
            level=level,
            logger=logger,
            fmt='%(asctime)s %(name)-15s %(levelname)-10s | %(message)s')
    logger.propagate = False
    return logger
Example #26
 def from_raw_text(cls,
                   raw_text: str,
                   installation_directory: Optional[str] = None):
     """Alternative method for creating configuration file from raw text
     Args:
         raw_text: The string representing the configuration file
          installation_directory: The installation directory where the config file resides
     Returns:
          An instance of ConfigManager
     """
     tmp_dir = '/tmp/dynamite/temp_configs/etc'
     tmp_config = f'{tmp_dir}/networks.cfg'
     utilities.makedirs(tmp_dir)
     with open(tmp_config, 'w') as out_f:
         out_f.write(raw_text)
     c = cls(installation_directory=f"{tmp_dir}/../")
     if installation_directory:
         c.installation_directory = installation_directory
     return c
Example #27
 def _create_kibana_directories(self):
     self.logger.info('Creating Kibana installation, configuration, and logging directories.')
     try:
         utilities.makedirs(self.install_directory, exist_ok=True)
         utilities.makedirs(self.configuration_directory, exist_ok=True)
         utilities.makedirs(self.log_directory, exist_ok=True)
         utilities.makedirs(os.path.join(self.install_directory, 'data'), exist_ok=True)
     except Exception as e:
         self.logger.error('Failed to create required directory structure.')
         self.logger.debug('Failed to create required directory structure; {}'.format(e))
         raise kibana_exceptions.InstallKibanaError(
             "Failed to create required directory structure; {}".format(e))
Example #28
 def from_raw_text(cls,
                   raw_text: str,
                   install_directory: Optional[str] = None):
     """Alternative method for creating configuration file from raw text
     Args:
         raw_text: The string representing the configuration file
         install_directory: The install directory for Filebeat
     Returns:
          An instance of ConfigManager
     """
     tmp_dir = '/tmp/dynamite/temp_configs'
     tmp_config = f'{tmp_dir}/filebeat.yml'
     utilities.makedirs(tmp_dir)
     with open(tmp_config, 'w') as out_f:
         out_f.write(raw_text)
     c = cls(install_directory=tmp_dir)
     if install_directory:
         c.install_directory = install_directory
     return c
Example #29
    def __init__(self, name: str, verbose: Optional[bool] = False, stdout: Optional[bool] = True,
                 log_level=logging.INFO):
        """
        Build a custom service installer

        Args:
            name: The name of the service
            stdout: Print output to console
            verbose: Include detailed debug messages
            log_level: The minimum logging.LOG_LEVEL to be handled
        """
        if verbose:
            log_level = logging.DEBUG
        self.stdout = stdout
        self.verbose = verbose
        self.logger = get_logger(str(name).upper(), level=log_level, stdout=stdout)
        self.dynamite_environ = utilities.get_environment_file_dict()
        utilities.create_dynamite_user()
        utilities.makedirs(const.PID_PATH, exist_ok=True)
        utilities.set_ownership_of_file(const.PID_PATH, user='******', group='dynamite')
Example #30
 def from_raw_text(
         cls,
         raw_text: str,
         configuration_directory: Optional[str] = None) -> ConfigManager:
     """Alternative method for creating configuration file from raw text
     Args:
         raw_text: The string representing the configuration file
         configuration_directory: The configuration directory for Suricata
     Returns:
          An instance of ConfigManager
     """
     tmp_dir = '/tmp/dynamite/temp_configs/'
     tmp_config = f'{tmp_dir}/suricata.yaml'
     utilities.makedirs(tmp_dir)
     with open(tmp_config, 'w') as out_f:
         out_f.write(raw_text)
     c = cls(configuration_directory=tmp_dir)
     if configuration_directory:
         c.configuration_directory = configuration_directory
     return c