def update_default_configurations():
    """Retrieve the latest skeleton configurations for setting up ElasticSearch, LogStash, Zeek, and Suricata.

    Raises:
        exceptions.DownloadError: If the configuration archive cannot be downloaded.
        exceptions.ArchiveExtractionError: If the downloaded archive cannot be extracted.
    """
    archive_path = os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz')
    # Start from a clean install cache so a stale archive cannot be picked up.
    shutil.rmtree(const.INSTALL_CACHE, ignore_errors=True)
    makedirs(const.DEFAULT_CONFIGS, exist_ok=True)
    try:
        download_file(const.DEFAULT_CONFIGS_URL, const.DEFAULT_CONFIGS_ARCHIVE_NAME, stdout=True)
    except Exception as e:
        raise exceptions.DownloadError(
            "General error occurred while downloading archive: {}; {}".format(archive_path, e))
    # Drop the previous skeleton configs before unpacking the fresh copy.
    shutil.rmtree(const.DEFAULT_CONFIGS, ignore_errors=True)
    time.sleep(1)
    try:
        extract_archive(archive_path, const.CONFIG_PATH)
    except IOError as e:
        raise exceptions.ArchiveExtractionError(
            "General error occurred while extracting archive: {}; {}".format(archive_path, e))
def download_from_mirror(self, mirror_path: str) -> Tuple[str, str, Optional[str]]:
    """Download a Dynamite service from a mirror

    Args:
        mirror_path: The path to the mirror file (one mirror per line, optionally
            as a comma-separated "url,archive_name,dir_name" triple)

    Returns:
        The mirror url, archive name, and directory name (once the archive has been extracted)

    Raises:
        exceptions.DownloadError: If the download fails from every mirror (or the mirror file is empty).
    """
    with open(mirror_path) as mirror_f:
        res, err = None, None
        # Pre-initialize so the failure path below cannot hit a NameError
        # when the mirror file contains no entries at all.
        url, archive_name, dir_name = None, None, None
        for mirror in mirror_f.readlines():
            try:
                url, archive_name, dir_name = [token.strip() for token in mirror.split(',')]
            except ValueError:
                # Single-token line: the whole line is the URL. Strip it so the
                # trailing newline does not corrupt the URL or the derived archive name.
                url = mirror.strip()
                archive_name = os.path.basename(url)
                dir_name = None
            self.logger.info("Downloading {} from {}".format(archive_name, url))
            # Remove any previously extracted copy so extraction starts clean.
            fqdn_dir_name = f'{const.INSTALL_CACHE}/{str(dir_name)}'
            if os.path.exists(fqdn_dir_name):
                shutil.rmtree(fqdn_dir_name, ignore_errors=True)
            try:
                res = utilities.download_file(url, archive_name, stdout=self.stdout)
            except Exception as e:
                res, err = False, e
                self.logger.warning(f'Failed to download {archive_name} from {url}; {e}')
            if res:
                break
        if not res:
            self.logger.error(f'An error occurred while attempting to download: {err}')
            raise exceptions.DownloadError(
                f'General error while attempting to download {archive_name} from all mirrors.')
        return url, archive_name, dir_name
def update_mirrors():
    """Retrieve the latest mirrors which contain the download locations for all components.

    :return: True, if retrieved successfully
    """
    create_dynamite_root_directory()
    download_file('https://github.com/DynamiteAI/dynamite-nsm-configs/raw/master/mirrors.tar.gz',
                  'mirrors.tar.gz', stdout=True)
    # Clear out any previously installed mirror definitions first.
    shutil.rmtree(const.MIRRORS, ignore_errors=True)
    try:
        print('Copying mirrors -> /etc/dynamite/mirrors')
        extract_archive(os.path.join(const.INSTALL_CACHE, 'mirrors.tar.gz'), '/etc/dynamite/')
    except IOError as e:
        sys.stderr.write('[-] An error occurred while attempting to extract file. [{}]\n'.format(e))
        return False
    return True
def download_kibana(stdout=False):
    """Download Kibana archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.KIBANA_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.KIBANA_ARCHIVE_NAME, stdout=stdout):
                break
def update_mirrors():
    """Retrieve the latest mirrors which contain the download locations for all components.

    :return: True, if retrieved successfully
    """
    # Wipe the install cache so the freshly downloaded archive is the only one present.
    shutil.rmtree(const.INSTALL_CACHE, ignore_errors=True)
    create_dynamite_root_directory()
    download_file(const.MIRRORS_CONFIG_URL, const.MIRRORS_CONFIG_ARCHIVE_NAME, stdout=True)
    shutil.rmtree(const.MIRRORS, ignore_errors=True)
    try:
        sys.stdout.write('[+] Copying mirrors -> {}\n'.format(const.MIRRORS))
        extract_archive(os.path.join(const.INSTALL_CACHE, 'mirrors.tar.gz'), const.CONFIG_PATH)
    except IOError as e:
        sys.stderr.write('[-] An error occurred while attempting to extract file. [{}]\n'.format(e))
        return False
    return True
def download_suricata(stdout=False):
    """Download Suricata archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.SURICATA_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.SURICATA_ARCHIVE_NAME, stdout=stdout):
                break
def download_logstash(stdout=False):
    """Download Logstash archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.LOGSTASH_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.LOGSTASH_ARCHIVE_NAME, stdout=stdout):
                break
def download_pf_ring(stdout=False):
    """Download PF_RING archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.PF_RING_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.PF_RING_ARCHIVE_NAME, stdout=stdout):
                break
def update_default_configurations():
    """Retrieve the latest skeleton configurations for setting up ElasticSearch, LogStash, Zeek, and Suricata.

    :return: True, if retrieved successfully
    """
    create_dynamite_root_directory()
    download_file('https://github.com/DynamiteAI/dynamite-nsm-configs/raw/master/default_configs.tar.gz',
                  'default_configs.tar.gz', stdout=True)
    # Remove the old skeleton configs, then give the filesystem a moment to settle.
    shutil.rmtree(const.DEFAULT_CONFIGS, ignore_errors=True)
    time.sleep(1)
    try:
        print('Copying default_configs -> /etc/dynamite/default_configs')
        extract_archive(os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz'), '/etc/dynamite/')
    except IOError as e:
        sys.stderr.write('[-] An error occurred while attempting to extract file. [{}]\n'.format(e))
        return False
    return True
def update_default_configurations():
    """Retrieve the latest skeleton configurations for setting up ElasticSearch, LogStash, Zeek, and Suricata.

    :return: True, if retrieved successfully
    """
    # Wipe the install cache so the freshly downloaded archive is the only one present.
    shutil.rmtree(const.INSTALL_CACHE, ignore_errors=True)
    create_dynamite_root_directory()
    download_file(const.DEFAULT_CONFIGS_URL, const.DEFAULT_CONFIGS_ARCHIVE_NAME, stdout=True)
    shutil.rmtree(const.DEFAULT_CONFIGS, ignore_errors=True)
    time.sleep(1)
    try:
        sys.stdout.write('[+] Copying default_configs -> {}\n'.format(const.DEFAULT_CONFIGS))
        extract_archive(os.path.join(const.INSTALL_CACHE, 'default_configs.tar.gz'), const.CONFIG_PATH)
    except IOError as e:
        sys.stderr.write('[-] An error occurred while attempting to extract file. [{}]\n'.format(e))
        return False
    return True
def download_oinkmaster(stdout=False):
    """Download Oinkmaster archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.OINKMASTER_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.OINKMASTER_ARCHIVE_NAME, stdout=stdout):
                break
def download_elasticflow(stdout=False):
    """Download Elastiflow archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.ELASTIFLOW_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.ELASTIFLOW_ARCHIVE_NAME, stdout=stdout):
                break
def download_synesis(stdout=False):
    """Download SynesisLite (Suricata) archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.SYNESIS_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.SYNESIS_ARCHIVE_NAME, stdout=stdout):
                break
def download_filebeat(stdout=False):
    """Download Filebeat archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.FILE_BEAT_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.FILE_BEAT_ARCHIVE_NAME, stdout=stdout):
                break
def download_zeek(stdout=False):
    """Download Zeek archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.ZEEK_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.ZEEK_ARCHIVE_NAME, stdout=stdout):
                break
def download_dynamite_sdk(stdout=False):
    """Download DynamiteSDK archive from the first reachable mirror.

    :param stdout: Print output to console
    """
    # Use a context manager so the mirror list file is always closed
    # (the original left the handle open).
    with open(const.DYNAMITE_SDK_MIRRORS, 'r') as mirror_f:
        for url in mirror_f.readlines():
            if utilities.download_file(url, const.DYNAMITE_SDK_ARCHIVE_NAME, stdout=stdout):
                break
def download_and_install(self) -> bool:
    """Download this package's archive and install its saved objects into Kibana.

    Starts the Kibana process, downloads the package archive into the install
    cache, waits for the Kibana API to respond (up to five 10-second polls),
    installs the saved objects, then stops Kibana.

    Returns:
        The result of the saved-objects install call.
    """
    from dynamite_nsm.services.kibana.process import ProcessManager
    from dynamite_nsm.services.kibana.package import SavedObjectsManager
    kibana_process = ProcessManager(stdout=True)
    kibana_process.start()
    objects_manager = SavedObjectsManager(username=self.username, password=self.password,
                                          target=self.target, stdout=True)
    download_path = f'{const.INSTALL_CACHE}/{self.name}.tar.gz'
    utilities.download_file(self.package_link, download_path)
    # Poll until the API answers or we give up after five attempts.
    attempts = 0
    while not self.kibana_api_up() and attempts < 5:
        attempts += 1
        sleep(10)
    install_result = objects_manager.install(download_path, ignore_warnings=True, tenant=self.tenant)
    kibana_process.stop()
    return install_result
def update_mirrors():
    """Retrieve the latest mirrors which contain the download locations for all components.

    Raises:
        exceptions.DownloadError: If the mirror archive cannot be downloaded or extracted.
    """
    archive_path = os.path.join(const.INSTALL_CACHE, 'mirrors.tar.gz')
    # Start from a clean install cache so a stale archive cannot be picked up.
    shutil.rmtree(const.INSTALL_CACHE, ignore_errors=True)
    makedirs(const.MIRRORS, exist_ok=True)
    try:
        download_file(const.MIRRORS_CONFIG_URL, const.MIRRORS_CONFIG_ARCHIVE_NAME, stdout=True)
    except Exception as e:
        raise exceptions.DownloadError(
            "General error occurred while downloading archive: {}; {}".format(archive_path, e))
    # Drop the previous mirror definitions before unpacking the fresh copy.
    shutil.rmtree(const.MIRRORS, ignore_errors=True)
    try:
        extract_archive(archive_path, const.CONFIG_PATH)
        return True
    except IOError as e:
        # NOTE(review): extraction failures raise DownloadError here, while the analogous
        # default-configs updater raises ArchiveExtractionError — confirm this is intended.
        raise exceptions.DownloadError(
            "General error occurred while extracting archive: {}; {}".format(archive_path, e))
def download_dynamite_sdk(stdout=False):
    """Download the DynamiteSDK archive from the first reachable mirror.

    :param stdout: Print output to console
    :raises general_exceptions.DownloadError: If an error occurs while reading mirrors or downloading
    """
    url = None
    try:
        with open(const.DYNAMITE_SDK_MIRRORS, 'r') as mirror_file:
            for url in mirror_file:
                # Stop at the first mirror that serves the archive successfully.
                if utilities.download_file(url, const.DYNAMITE_SDK_ARCHIVE_NAME, stdout=stdout):
                    break
    except Exception as e:
        raise general_exceptions.DownloadError(
            "General error while downloading DynamiteSDK from {}; {}".format(url, e))
def download_from_mirror(mirror_path, fname, stdout=False, verbose=False):
    """Download *fname* by trying each mirror URL listed in *mirror_path* in order.

    :param mirror_path: Path to a file containing one mirror URL per line
    :param fname: Name to save the downloaded archive under
    :param stdout: Print output to console
    :param verbose: Include debug-level log messages
    :raises general_exceptions.DownloadError: If the download fails from every mirror
    """
    log_level = logging.INFO
    if verbose:
        log_level = logging.DEBUG
    logger = get_logger('BASESVC', level=log_level, stdout=stdout)
    with open(mirror_path) as mirror_f:
        res, err = None, None
        for url in mirror_f.readlines():
            logger.info("Downloading {} from {}".format(fname, url))
            try:
                res = utilities.download_file(url, fname, stdout=stdout)
            except Exception as e:
                res, err = False, e
                logger.warning("Failed to download {} from {}; {}".format(fname, url, e))
            if res:
                break
        if not res:
            # Include the last captured error; the original dropped it, leaving a dangling ';'.
            raise general_exceptions.DownloadError(
                "General error while attempting to download {} from all mirrors; {}".format(fname, err))