def _generate_ta_folder(self):
    '''
    copy the project to a target folder and merge the conf, because the best
    practice validator will not scan the local conf folder.
    return the generated folder
    '''
    target_folder = os.path.join(builder_constant.ADDON_BUILDER_APP_NAME,
                                 'local', 'validation', self.app_name)
    # copy the ta project to package workspace dir
    app_path = common_util.make_splunk_path(['etc', 'apps', self.app_name])
    tabuilder = builder.TABuilder(self.app_name, self.splunk_uri,
                                  self.session_key)
    tgz_file = workspace_util.package_app(tabuilder)
    validation_package_workspace = common_util.make_splunk_path([
        'etc', 'apps', builder_constant.ADDON_BUILDER_APP_NAME, 'local',
        'validation'
    ])
    ta_folder = os.path.join(validation_package_workspace, self.app_name)
    if os.path.isdir(ta_folder):
        shutil.rmtree(ta_folder)
    with tarfile.open(tgz_file, 'r:*') as tf:
        tf.extractall(validation_package_workspace)
    return target_folder
def export_project(self):
    # copy the ta project to package workspace dir
    app_path = common_util.make_splunk_path(['etc', 'apps', self.app])
    package_workspace = common_util.make_splunk_path(
        ['var', 'data', 'tabuilder', 'export_ta', self.app])
    # not packaging the project, do not merge the confs
    workspace_util.prepare_app_package_workspace(package_workspace, app_path,
                                                 self.tabuilder)
    # remove the local UCC conf files, UCC confs may be encrypted
    confs = [
        self.app + '_credential.conf', self.app + '.conf',
        self.app + '_customized.conf', 'password.conf', 'passwords.conf'
    ]
    confs.extend(global_setting_util.get_ucc_conf_file_names(self.app))
    for c in set(confs):
        conf_path = os.path.join(package_workspace, 'local', c)
        if os.path.isfile(conf_path):
            os.remove(conf_path)
            _logger.debug('Remove the UCC conf ' + c)
    # remove all the input stanzas configured in UCC; they are runtime info
    local_inputs = os.path.join(package_workspace, 'local', 'inputs.conf')
    if os.path.isfile(local_inputs):
        parser = conf_parser.TABConfigParser()
        parser.read(local_inputs)
        to_be_deleted_sections = [
            s for s in parser.sections() if len(s.split("://")) == 2
        ]
        for s in to_be_deleted_sections:
            parser.remove_section(s)
        if to_be_deleted_sections:
            with open(local_inputs, 'w') as fp:
                parser.write(fp)
            _logger.debug('update the local inputs.conf.')
    # export the meta to the package_workspace
    meta = self._dump_add_on_project_meta(package_workspace)
    if meta_const.SOURCETYPE_BUILDER in meta:
        sourcetypes = list(meta[meta_const.SOURCETYPE_BUILDER].keys())
        if sourcetypes:
            self._dump_sample_events(sourcetypes, package_workspace)
    AppMigrator._rm_hidden_files(package_workspace)
    download_file = self.get_exported_file_full_path(package_workspace)
    if os.path.isfile(download_file):
        os.remove(download_file)
    with tarfile.open(download_file, "w:gz") as tar:
        tar.add(package_workspace, arcname=self.app)
    return download_file
def remove_sourcetype_contents(tab_conf_mgr, source_app, sourcetype):
    # remove the sourcetype stanza from source_app
    app_home = make_splunk_path(['etc', 'apps'])
    default_props = os.path.join(app_home, source_app, "default", "props.conf")
    local_props = os.path.join(app_home, source_app, "local", "props.conf")
    remove_stanza_in_conf(default_props, sourcetype)
    remove_stanza_in_conf(local_props, sourcetype)
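# Hypothetical sketch (not the original helper): one way a
# remove_stanza_in_conf helper could look, assuming the TABConfigParser used
# elsewhere in this codebase is importable here and behaves like ConfigParser
# for read/has_section/remove_section/write.
def remove_stanza_in_conf_sketch(conf_path, stanza):
    if not os.path.isfile(conf_path):
        return
    parser = conf_parser.TABConfigParser()
    parser.read(conf_path)
    if parser.has_section(stanza):
        parser.remove_section(stanza)
        with open(conf_path, 'w') as fp:
            parser.write(fp)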
def cancel_parse_unstructured_data(self, sourcetype):
    input_type = builder_constant.FIELD_EXTRACTION_MI
    self.tab_conf_mgr.delete_data_input(input_type, sourcetype)
    checkpoint_file = common_util.make_splunk_path([
        "var", "lib", "splunk", "modinputs",
        builder_constant.FIELD_EXTRACTION_MI, sourcetype
    ])
    if os.path.isfile(checkpoint_file):
        os.remove(checkpoint_file)
def upgrade_from_2_0_0_to_2_1_0(self):
    self.__logger.info(
        "begin to upgrade tab version from 2.0.0 to 2.1.0 in app %s.",
        self.__appname)
    global_settings = self.__ta_configuration_builder.get_global_settings(
    ) or None
    # regenerate the python lib if needed
    has_inputs = bool(self.__input_builder.get_all_TA_inputs())
    has_alerts = bool(self.__alert_builder.get_all_TA_alerts())
    if has_inputs or has_alerts:
        asset_generator = ta_static_asset_generator.AssetGenerator(
            self.__resource_dir,
            os.path.join(self.__splunk_app_dir, self.__appname),
            self.__resource_lib_dir,
            app_name=self.__appname)
        asset_generator.upgrade_from_2_0_0_to_2_1_0()
    input_upgraded = self.__input_builder.upgrade_from_2_0_0_to_2_1_0()
    self.alert_builder.upgrade_from_2_0_0_to_2_1_0(global_settings)
    global_setting_upgraded = self.__ta_configuration_builder.upgrade_from_2_0_0_to_2_1_0(
        input_upgraded)
    # update the basic meta to regenerate app.conf so that the UCC frontend
    # page is used. This must happen after the UCC upgrade.
    basic_meta = self.__basic_builder.get_meta()
    if basic_meta:
        self.__basic_builder.update_TA_basic(basic_meta,
                                             global_setting_upgraded)
    elif global_setting_upgraded:
        # change the local/app.conf directly
        local_app_conf = common_util.make_splunk_path(
            [self.__splunk_app_dir, self.app_name, 'local', 'app.conf'])
        if os.path.isfile(local_app_conf):
            parser = conf_parser.TABConfigParser()
            parser.read(local_app_conf)
            item_dict = parser.item_dict()
            try:
                parser.remove_option('ui', 'setup_view')
            except:
                pass
            if not parser.has_section('install'):
                parser.add_section('install')
            parser.set('install', 'is_configured', '1')
            if not parser.has_section('ui'):
                parser.add_section('ui')
            parser.set('ui', 'is_visible', '1')
            with open(local_app_conf, 'w') as f:
                parser.write(f)
            self.__logger.info(
                'No basic meta. Just update the app.conf when upgrading. TA:%s',
                self.app_name)
            common_util.reload_splunk_apps(self.__service_with_tab_context)
    # todo: any other component update should be added here
    self.__logger.info("TA %s upgrade from 2.0.0 to 2.1.0 is done.",
                       self.__appname)
def remove_legacy_validation_rules():
    rule_dir = common_util.make_splunk_path([
        "etc", "apps", builder_constant.ADDON_BUILDER_APP_NAME, "bin",
        "validation_rules", "validation_field"
    ])
    removed_rule_specs = ("extraction_get_events.rule", )
    for rule in removed_rule_specs:
        fullpath = os.path.join(rule_dir, rule)
        if os.path.isfile(fullpath):
            os.remove(fullpath)
def get_splunk_csv_output_path():
    run_path = common_util.make_splunk_path(("var", "run", "splunk"))
    # for Splunk 6.4.0 or higher, a "csv" folder is added
    fpath = os.path.join(run_path, "csv")
    if os.path.isdir(fpath):
        return fpath
    # for Splunk 6.3.*, there is no "csv" folder
    return run_path
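# Illustrative usage sketch (not part of the original module): the file name
# "my_export.csv" is a hypothetical example of a CSV dropped into the
# version-dependent output directory resolved above.
csv_file = os.path.join(get_splunk_csv_output_path(), "my_export.csv")
if os.path.isfile(csv_file):
    with open(csv_file) as fp:
        header = fp.readline().strip().split(",")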
def __init__(self, appname, service_with_tab_context,
             service_with_ta_context):
    self._appname = appname
    self._logger = logger.get_global_settings_builder_logger()
    pdir = os.path.split(os.path.realpath(__file__))[0]
    self._resource_dir = os.path.join(pdir, "resources")
    self._resource_lib_dir = os.path.join(pdir, "resources_lib")
    self._ucc_lib_resource_dir = os.path.join(pdir, 'ucc_resources')
    self._current_app_dir = common_util.make_splunk_path(
        ['etc', 'apps', self._appname])
    self._service_with_ta_context = service_with_ta_context
    self._service_with_tab_context = service_with_tab_context
    self._global_setting_meta = ta_configuration_meta.GlobalSettingMeta(
        appname, service_with_tab_context)
    self._static_asset_gen = ta_static_asset_generator.AssetGenerator(
        self._resource_dir, self._current_app_dir, self._resource_lib_dir)
    self._basic_meta = ta_basic_meta.TABasicMeta(
        self._appname, self._service_with_tab_context)
def enable_ucc_page_in_app_conf(self):
    m = self._basic_meta.meta
    if not m:
        # for an existing TA, just change the visibility in app.conf
        app_conf = common_util.make_splunk_path(
            ['etc', 'apps', self.__appname, 'local', 'app.conf'])
        if os.path.isfile(app_conf):
            parser = conf_parser.TABConfigParser()
            parser.read(app_conf)
            if not parser.has_section('ui'):
                parser.add_section('ui')
            parser.set('ui', 'is_visible', '1')
            with open(app_conf, 'w') as fp:
                parser.write(fp)
    else:
        self.__asset_generator.generate_app_conf(
            m, is_setup_page_enabled=True)
    common_util.reload_splunk_apps(self.__service_with_tab_context)
def remove_all_unstructured_data_inputs(self):
    """
    Cleanup the data inputs, except the monitor data input
    """
    input_type = builder_constant.FIELD_EXTRACTION_MI
    data_inputs = self.internal_conf_mgr.get_data_input(input_type)
    checkpoint_path = common_util.make_splunk_path([
        "var", "lib", "splunk", "modinputs",
        builder_constant.FIELD_EXTRACTION_MI
    ])
    for data_input in data_inputs:
        name = data_input.get("name")
        if name == builder_constant.FIELD_EXTRACTION_MONITOR_MI:
            continue
        self.tab_conf_mgr.delete_data_input(input_type, name)
        checkpoint_file = os.path.join(checkpoint_path, name)
        if os.path.isfile(checkpoint_file):
            os.remove(checkpoint_file)
def disable_ucc_page_in_app_conf(self):
    m = self._basic_meta.meta
    if not m:
        # for an existing TA, just change the visibility in app.conf
        app_conf = common_util.make_splunk_path(
            ['etc', 'apps', self.__appname, 'local', 'app.conf'])
        if os.path.isfile(app_conf):
            parser = conf_parser.TABConfigParser()
            parser.read(app_conf)
            try:
                parser.remove_option('ui', 'is_visible')
                with open(app_conf, 'w') as fp:
                    parser.write(fp)
            except:
                pass
    else:
        self.__asset_generator.generate_app_conf(
            m, is_setup_page_enabled=False)
        # should regenerate the nav xml if the add-on is set as visible
        self.__asset_generator.generate_nav_xml(
            m, is_setup_page_enabled=False)
    common_util.reload_splunk_apps(self.__service_with_tab_context)
def detect_MI_single_instance_mode(app_bin_dir, declare_module, module_name):
    '''
    return True if the MI is in single instance mode
    '''
    detect_single_instance_mode_code = '''
import {declare}
import sys
import {mi_module}

if 'use_single_instance_mode' in dir({mi_module}) and {mi_module}.use_single_instance_mode():
    sys.exit(0)
else:
    sys.exit(911)
'''
    temp_file = os.path.join(app_bin_dir, module_name + '_detect.py')
    splk_python = common_util.make_splunk_path(['bin', 'python'])
    try:
        with open(temp_file, 'w') as f:
            code = detect_single_instance_mode_code.format(
                declare=declare_module, mi_module=module_name)
            f.write(code)
            _logger.debug('detect data input mode. Code:%s', code)
        p_child = subprocess.Popen([splk_python, temp_file],
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        ret = p_child.wait()
        _logger.debug('detect input mode. input:%s, exit:%s', module_name, ret)
        return ret == 0
    except:
        _logger.error('Exception got when detecting the input mode. %s',
                      traceback.format_exc())
    finally:
        if os.path.isfile(temp_file):
            os.remove(temp_file)
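# Illustrative usage sketch (not part of the original module). The add-on name
# "Splunk_TA_example", the declare module "import_declare_test" and the
# modular input module "input_module_example" are hypothetical placeholders;
# callers pass the add-on's bin directory, its import-declare module and the
# generated input module name.
app_bin_dir = common_util.make_splunk_path(
    ['etc', 'apps', 'Splunk_TA_example', 'bin'])
is_single_instance = detect_MI_single_instance_mode(
    app_bin_dir, 'import_declare_test', 'input_module_example')
if is_single_instance:
    _logger.debug('one modinput process will serve all input stanzas')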
def download_exported_ta_project_file(self, action, **params):
    uri = scc.getMgmtUri()
    session = cherrypy.session.get("sessionKey")
    app = params.get('app', None)
    try:
        if app:
            migrator = AppMigrator(app, uri, session)
            app_root_dir = make_splunk_path(['etc', 'apps', app])
            tar_file = migrator.get_exported_file_full_path(app_root_dir)
            if not os.path.isfile(tar_file):
                raise CommonException(
                    e_message='tgz file {} not found.'.format(tar_file),
                    err_code=41,
                    options={'app': app})
            return serve_file(tar_file, "application/x-download",
                              "attachment")
        else:
            raise CommonException(e_message='app is not set.', err_code=40)
    except CommonException as ce:
        logger.error('%s', traceback.format_exc())
        return self.render_json({
            'err_code': ce.get_err_code(),
            'err_args': ce.get_options()
        })
def import_project(cls, app_package_file, service):
    '''
    app_package_file should be the full file path.
    Return the brief meta of the imported app. If the import fails, an
    exception is thrown.
    '''
    temp_dir = package_workspace = common_util.make_splunk_path([
        'var', 'data', 'tabuilder', 'import_ta',
        'space_' + str(random.randint(0, 1000))
    ])
    try:
        with tarfile.open(app_package_file, mode='r:*') as tf:
            if os.path.isdir(temp_dir):
                shutil.rmtree(temp_dir)
            os.makedirs(temp_dir)
            _logger.info('Extract project package to %s', temp_dir)
            tf.extractall(temp_dir)
    except tarfile.ReadError as reade:
        e_message = 'Fail to extract TA project file. The file is not a valid tarfile. {}'.format(
            str(reade))
        _logger.error(e_message + traceback.format_exc())
        raise CommonException(e_message=e_message, err_code=76)
    except Exception as e:
        e_message = 'Fail to extract TA project. {}'.format(str(e))
        _logger.error(e_message + traceback.format_exc())
        raise CommonException(e_message=e_message, err_code=75)
    AppMigrator._rm_hidden_files(temp_dir)
    app_name, meta = cls._read_app_meta(temp_dir)
    if not app_name:
        raise CommonException(e_message='can not load ta meta.', err_code=35)
    extracted_app_root = os.path.join(temp_dir, app_name)
    if not os.path.isdir(extracted_app_root):
        raise CommonException(
            e_message='package root directory is not consistent with app_name.',
            err_code=38)
    # check if the app already exists in the apps dir
    app_root = common_util.make_splunk_path(['etc', 'apps', app_name])
    if os.path.isdir(app_root):
        raise CommonException(
            e_message='can not load ta meta. App dir already exists.',
            err_code=37,
            options={'app': app_name})
    meta_manager.MetaManager.load_app_all_meta(
        service, app_name, meta, overwrite=False)
    # load the events to meta
    events_file = os.path.join(extracted_app_root, cls.EVENT_FILE)
    if os.path.isfile(events_file):
        with open(events_file, 'r') as fp:
            events = json.load(fp)
        mgr = meta_manager_event.EventMetaManager(None, None, service=service)
        for key, value in list(events.items()):
            mgr.update_meta_data(key, value)
    # remove all the UCC related confs, UCC confs may be encrypted
    ucc_files = [
        os.path.join(extracted_app_root, 'local', i)
        for i in global_setting_util.get_ucc_conf_file_names(app_name)
    ]
    _logger.debug('All UCC files:%s', ucc_files)
    for f in ucc_files:
        if os.path.isfile(f):
            _logger.debug('remove ucc file:%s', f)
            os.remove(f)
    # move the dir to etc/apps
    shutil.move(extracted_app_root, os.path.dirname(app_root))
    # regenerate the inputs.conf, in case there are encrypted fields in the stanzas
    input_builder = builder_ta_input.TAInputBuilder(
        app_name, common_util.get_splunkd_uri(service), service.token)
    input_builder.regenerate_inputs_conf()
    _logger.debug('regen inputs.conf when importing TA:%s', app_name)
    # cleanup the meta files
    events_file = os.path.join(app_root, cls.EVENT_FILE)
    if os.path.isfile(events_file):
        os.remove(events_file)
    os.remove(
        os.path.join(app_root, package_util.get_aob_meta_file_name(app_name)))
    # reload the apps
    common_util.reload_splunk_apps(service)
    # return the basic info for this app
    brief_meta = meta_util.get_project_brief_meta(service, app_name)
    return brief_meta
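# Illustrative caller sketch (not part of the original module), assuming
# import_project and get_import_package_full_path are classmethods on
# AppMigrator (as their cls parameters suggest) and `service` is an
# authenticated splunklib service object; the package file name is a
# hypothetical example.
package = AppMigrator.get_import_package_full_path('example_ta_export.tgz')
try:
    brief_meta = AppMigrator.import_project(package, service)
    _logger.info('Imported add-on, brief meta: %s', brief_meta)
except CommonException as ce:
    _logger.error('Import failed, err_code=%s', ce.get_err_code())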
def get_app_root(appname):
    return common_util.make_splunk_path(('etc', 'apps', appname))
def _run(self):
    '''
    return: 3 element tuple (return_code, raw_stdout_out, raw_stderr_out)
    '''
    ckpt = self._ckpter.get(CKPT_NAME)
    if ckpt is None:
        ckpt = {}
    if CKPT_KEY not in ckpt:
        ckpt[CKPT_KEY] = {}
    input_scheme = Template(OPTION_FILE_CONTENT).render(
        server_uri=self._server_uri,
        session_key=self._session_key,
        checkpoint_dir=self._checkpoint_dir,
        options=self._options,
        interval=self._interval,
        input_name=self._input_name,
        sourcetype=self._sourcetype)
    # runner_logger.debug('input stream:' + input_scheme)
    # use python3 for the test by default
    if os.path.isfile(make_splunk_path(('bin', "python3"))) \
            or os.path.isfile(make_splunk_path(('bin', "python3.exe"))):
        cmd2 = [self._get_splunk_bin(), 'cmd', 'python3', self._file_path]
    else:
        cmd2 = [self._get_splunk_bin(), 'cmd', 'python', self._file_path]
    # make it the same as core
    cwd = r"C:\Windows\system32" if platform.system() == "Windows" else '/'
    # prepare the env
    child_env = os.environ.copy()
    child_env[AOB_TEST_FLAG] = 'true'
    if self._globalsettings:
        child_env[GLOBALSETTINGS] = json.dumps(self._globalsettings)
    child_env[DATA_INPUTS_OPTIONS] = json.dumps(self._data_inputs_options)
    runner_logger.debug(
        "Start the test subprocess with env:%s",
        logger.hide_sensitive_field({
            GLOBALSETTINGS: self._globalsettings,
            DATA_INPUTS_OPTIONS: self._data_inputs_options
        }))
    try:
        child2 = subprocess.Popen(
            cmd2,
            stdin=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            cwd=cwd,
            env=child_env)
        ckpt[CKPT_KEY][self._test_id] = {
            'pid': child2.pid,
            'app': self._app,
            'input': self._input_name
        }
        self._ckpter.update(CKPT_NAME, ckpt)
        stdout_str, stderr_str = child2.communicate(
            input=input_scheme.encode())
        stdout_str = stdout_str.decode()
        stderr_str = stderr_str.decode()
        retcode = child2.returncode
        del ckpt[CKPT_KEY][self._test_id]
        if not has_kill_flag(CKPT_DIR, self._test_id):
            # normal exit, not killed
            self._ckpter.update(CKPT_NAME, ckpt)
        return retcode, stdout_str, stderr_str
    except subprocess.CalledProcessError as e:
        runner_logger.error('Fail to execute the test process:%s. %s', e.cmd,
                            traceback.format_exc())
        return e.returncode, '', e.output
import logging
import os
import re

from field_extraction_builder.regex_loader import RegexLoader
import field_extraction_builder.regex_util
from field_extraction_builder.regex_logger import Logs
from field_extraction_builder.data_format.data_format import DataFormat
from field_extraction_builder.data_format.format_handler_table import TableHandler
from field_extraction_builder.data_format.format_handler_kv import KVHandler
from field_extraction_builder.regex_exception import InvalidRegex, CaptureGroupCountError
from tabuilder_utility.builder_exception import CommonException
from ta_generator import builder_util
from tabuilder_utility import common_util

Logs().set_parent_dir(common_util.make_splunk_path(["var", "log", "splunk"]))

from aob.aob_common import logger, builder_constant
from tabuilder_utility import tab_conf_manager, search_util
from tabuilder_utility.ko_util import ko_common_util
from ta_meta_management import meta_manager, meta_manager_event, meta_const
from aob.aob_common.metric_collector import metric_util

_LOGGER = logger.get_field_extraction_builder_logger(logging.DEBUG)

TAB_REPORT_TABLE_RESULTS_OBJ_NAME = "ta_builder_internal_use_table_format_results"
TAB_REPORT_KV_RESULTS_OBJ_NAME = "ta_builder_internal_use_kv_format_results"
TAB_GENERATED_EXTRACTION_PREFIX = "aob_gen"


class TAExtractionBuilder(object):
    """
def _get_splunk_bin(self):
    if os.name == 'nt':
        splunk_bin = 'splunk.exe'
    else:
        splunk_bin = 'splunk'
    return make_splunk_path(('bin', splunk_bin))
def get_import_package_full_path(cls, file_name):
    import_dir = common_util.make_splunk_path(
        ['etc', 'apps', ADDON_BUILDER_APP_NAME, 'local', 'import'])
    if not os.path.isdir(import_dir):
        os.makedirs(import_dir)
    return os.path.join(import_dir, file_name)
def dryrun_modinput_code(self, datainput):
    '''
    The dryrun returns the following structure as the results
    {
        'status': 'success/fail',
        // a list of results, each result is a python dict
        'results': [event1, event2, event3],
        'error': 'error messages'
    }
    '''
    # TODO: dryrun is a sync call now. If the modinput is long running,
    # it should be made async.
    dryrun_result = None
    datainput = self.__input_meta_mgr.add_default_values(datainput)
    self.__input_meta_mgr.validate_new_meta(datainput, 'uuid' in datainput)
    # if it is a cc data input, the meta should be processed
    if datainput.get('type') == data_input_util.INPUT_METHOD_REST:
        datainput = data_input_util.process_cc_data_input_meta(datainput)
    if 'test_id' not in datainput:
        ce = CommonException(
            e_message='dry run job id not found.',
            err_code=3142,
            options={'input_name': datainput['name']})
        raise ce
    datainput['server_uri'] = self.__uri
    datainput['session_key'] = self.__session_key
    datainput['checkpoint_dir'] = common_util.make_splunk_path([
        'var', 'lib', 'splunk', 'modinputs', datainput['name'],
        'test_' + datainput['name']
    ])
    test_id = datainput['test_id']
    bin_dir = builder_util.get_target_folder(self.__current_ta_dir, "bin")
    cc_input_builder = None
    if datainput.get('type') == data_input_util.INPUT_METHOD_REST:
        cc_input_builder = CloudConnectDataInputBuilder(
            datainput, datainput.get('global_settings', {}))
        datainput['cc_json_file'] = cc_input_builder.get_cc_json_file_path(
            bin_dir, True)
    # generate {mod input}.py
    if datainput['type'] == data_input_util.INPUT_METHOD_CUSTOMIZED \
            and 'code' not in datainput:
        raise CommonException(
            e_message='No code in data input:{}'.format(datainput['name']),
            err_code=3141,
            options={'input_name': datainput['name']})
    elif datainput['type'] in [
            data_input_util.INPUT_METHOD_CMD, data_input_util.INPUT_METHOD_REST
    ]:
        datainput['code'] = self.generate_input_module_content(datainput)
    test_input_module = self.get_input_module_file_name(datainput['name'] +
                                                        test_id)
    targetfile = os.path.join(bin_dir, '{}.py'.format(test_input_module))
    with open(targetfile, 'w') as f:
        f.write(datainput['code'])
    datainput['input_module_file'] = targetfile
    # generate input.py
    modinput_content = self.generate_python_modinput_content(datainput)
    # Important: should replace the base input module name, since it is
    # changed!
    old_import = TAInputMetaMgr.BASE_INPUT_MODULE_IMPORT.format(
        self.get_input_module_file_name(datainput['name']))
    new_import = TAInputMetaMgr.BASE_INPUT_MODULE_IMPORT.format(
        test_input_module)
    modinput_content = modinput_content.replace(old_import, new_import)
    datainput['code'] = modinput_content
    datainput['modinput_file'] = os.path.join(
        bin_dir, '{}.py'.format(datainput['name'] + test_id))
    try:
        self.__asset_generator.generate_import_declare_if_not_exist()
        self.__asset_generator.generate_python_libs_if_not_exist()
        # generate cc json
        if cc_input_builder:
            cc_input_builder.generate_cc_input_json(bin_dir, True)
        # Do not open this log in production env. It may log some user
        # credentials: TAB-2191
        # self.__logger.debug("begin to test data input %s",
        #                     logger.hide_sensitive_field(datainput))
        code_runner = runner.CodeRunner(self.__appname, datainput)
        return_code, stdout_buffer, stderr_buffer = code_runner.run()
        if cc_input_builder:
            dryrun_result = cc_input_builder.process_cc_input_dry_run_result(
                return_code, stdout_buffer, stderr_buffer)
        else:
            if return_code == 0:
                # success
                raw_events = data_input_util.parse_MI_output_xml(stdout_buffer)
                dryrun_result = {'status': 'success', 'results': raw_events}
            else:
                dryrun_result = {
                    'status': 'fail',
                    'results': [],
                    'error': stderr_buffer
                }
    except Exception as e:
        self.__logger.error('Error happens when dry run input:%s. \n %s',
                            datainput['modinput_file'],
                            traceback.format_exc())
        raise e
    finally:
        # clean up the base modinput python files. The modinput file will
        # be cleaned in the code runner
        if 'input_module_file' in datainput and os.path.isfile(
                datainput['input_module_file']):
            os.remove(datainput['input_module_file'])
            self.__logger.debug('remove input module file:%s after testing.',
                                datainput['input_module_file'])
        self.__asset_generator.cleanup_ta_bin_folder()
        for f in os.listdir(bin_dir):
            if f.endswith('.pyc'):
                self.__logger.debug('remove %s after testing.', f)
                os.remove(os.path.join(bin_dir, f))
        if cc_input_builder:
            cc_json = cc_input_builder.get_cc_json_file_path(bin_dir, True)
            if os.path.isfile(cc_json):
                self.__logger.debug('delete dryrun cc json:%s.', cc_json)
                os.remove(cc_json)
    return dryrun_result
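# Illustrative caller sketch (not part of the original module). It relies only
# on the result structure documented in the docstring above; `input_builder`
# stands for a builder object exposing dryrun_modinput_code, and `datainput`
# is a prepared data input meta dict containing a 'test_id'.
dryrun = input_builder.dryrun_modinput_code(datainput)
if dryrun['status'] == 'success':
    for event in dryrun['results']:
        print(event)  # each event is a python dict
else:
    print('dry run failed: {}'.format(dryrun.get('error')))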
def start_subprocess(self):
    self.prepare()
    # use python3 for the test by default
    if os.path.isfile(make_splunk_path(('bin', "python3"))) \
            or os.path.isfile(make_splunk_path(('bin', "python3.exe"))):
        cmd = [
            self._get_splunk_bin(), 'cmd', 'python3', self._py_file,
            "--execute"
        ]
    else:
        cmd = [
            self._get_splunk_bin(), 'cmd', 'python', self._py_file,
            "--execute"
        ]
    # cmd = ["python", self._py_file, "--execute"]
    for one_input in self._inputs:
        try:
            child_env = os.environ.copy()
            child_env[AOB_TEST_FLAG] = 'true'
            child_env[GLOBALSETTINGS] = json.dumps(
                self._test_global_settings.get('settings', {}))
            self._child_proc = subprocess.Popen(
                cmd,
                stdin=subprocess.PIPE,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                cwd=self._code_test_dir,
                env=child_env)
            self._logger.info(
                'operation="start subprocess", pid="%s", '
                'status="success", input="%s"', self._child_proc.pid,
                one_input)
        except subprocess.CalledProcessError as e:
            self._logger.info(
                'operation="start subprocess", pid="%s", '
                'status="failed", input="%s", reason="%s"',
                self._child_proc.pid, one_input, e.output)
        self._stderr_thd = Thread(
            target=self.read_stdpipe_and_write,
            args=(self._child_proc.stderr, self._stderr_file, sys.stderr,
                  "stderr"))
        self._stdout_thd = Thread(
            target=self.read_stdpipe_and_write,
            args=(self._child_proc.stdout, self._stdout_file, sys.stdout,
                  "stdout"))
        self._stderr_thd.daemon = True
        self._stdout_thd.daemon = True
        self._child_proc.stdin.write(json.dumps(one_input).encode())
        self._child_proc.stdin.close()
        self._stderr_thd.start()
        self._stdout_thd.start()
        self._stdout_thd.join(self._timeout)
        self._stderr_thd.join(self._timeout)
        if self._stderr_thd.is_alive() or self._stdout_thd.is_alive():
            msg = 'pid="{}" alert="{}" timeout={}'.format(
                self._child_proc.pid, self._alert_name, self._timeout)
            self.kill_subprocess()
            raise aae.AlertTestSubprocessTimeoutFailure(msg)
        self._child_proc.wait()
        self._subprocess_out["exit_code"] = self._child_proc.returncode
    '''