def change_values_for_one_site(self, site, new_value_for_keyname1, new_value_for_keyname2, new_value_for_keyname3):
    """Update three threshold values in <site>/lhost.yml and commit to SVN.

    Checks out the site's lhost.yml into a temporary working directory,
    rewrites the 'threshold_values' mapping and commits the change under
    change ticket FCO83000210.

    :param site: site id used to build the SVN path and the commit message
    :param new_value_for_keyname1: new value stored under self.keyName1
    :param new_value_for_keyname2: new value stored under self.keyName2
    :param new_value_for_keyname3: new value stored under self.keyName3
    """
    self.set_site_svn_lhost_path(site)
    self.set_subpath_siteid(site)
    with temporary.temp_dir(parent_dir='/tmp/') as tmp_work_dir:
        teml_yml_fl = str(tmp_work_dir) + '/{site}/lhost.yml'.format(site=site)
        self.set_work_dir(tmp_work_dir)
        self.wc.info()
        with open(teml_yml_fl) as scanner_conts:
            # safe_load: plain config data; yaml.load without an explicit
            # Loader is deprecated and can construct arbitrary objects.
            lhost_yml_dict = yaml.safe_load(scanner_conts)
        # BUGFIX: the original read self.new_value_for_keyname* (undefined
        # attributes) instead of the parameters in the overwrite branch, and
        # called .update() on a missing 'threshold_values' key in the create
        # branch (KeyError).  setdefault + one update covers every case.
        thresholds = lhost_yml_dict.setdefault('threshold_values', {})
        thresholds.update({
            self.keyName1: new_value_for_keyname1,
            self.keyName2: new_value_for_keyname2,
            self.keyName3: new_value_for_keyname3,
        })
        self.process_config_file(
            '/lhost.yml',
            yaml.safe_dump(lhost_yml_dict, default_flow_style=False))
        self.log_status()
        revision = self.wc.commit(
            msg='Updated lhost details of siteid:%s as part of %s on %s'
            % (site, 'FCO83000210', datetime.now()))  # FCO83000210
        if revision:
            log.info('revision: %s', str(revision))
        else:
            log.debug('No changes, nothing to commit')
def test_temp_dir_with_chdir_creates_temp_dir():
    """temp_dir(make_cwd=True) chdirs into the new dir, then restores cwd."""
    starting_dir = os.getcwd()
    with temporary.temp_dir(make_cwd=True) as temp_dir:
        assert temp_dir.is_dir()
        assert temp_dir.samefile(os.getcwd())
    # On exit the directory is removed and the original cwd is restored.
    assert not temp_dir.exists()
    assert starting_dir == os.getcwd()
def change_values_for_one_site(self, site):
    """Rewrite any-case '$user' to '$USER' inside the 'endpoints' section of
    <site>/lhost.yml and commit the result to SVN (ticket FCO12345678).

    :param site: site id used to build the SVN path and the commit message
    """
    self.set_site_svn_lhost_path(site)
    self.set_subpath_siteid(site)
    with temporary.temp_dir(parent_dir='/tmp/') as tmp_work_dir:
        teml_yml_fl = str(tmp_work_dir) + '/{site}/lhost.yml'.format(site=site)
        # Replaced Python-2 'print' debugging with the module logger.
        log.debug('working copy yaml: %s', teml_yml_fl)
        self.set_work_dir(tmp_work_dir)
        self.wc.info()
        with open(teml_yml_fl) as scanner_conts:
            # safe_load: yaml.load without a Loader is deprecated/unsafe.
            lhost_yml_dict = yaml.safe_load(scanner_conts)
        # BUGFIX: the original defaulted 'endpoints' to {} and then passed it
        # to re.sub, which raises TypeError on a dict.  Only substitute when
        # the section is actually a string (its type here — TODO confirm).
        endpoints = lhost_yml_dict.get('endpoints')
        if isinstance(endpoints, str):
            lhost_yml_dict['endpoints'] = re.sub(
                r'(\$user)+', '$USER', endpoints, flags=re.M | re.IGNORECASE)
            log.debug('endpoints after substitution: %s',
                      lhost_yml_dict['endpoints'])
        # BUGFIX: yaml.safe_dump() already supplies SafeDumper internally, so
        # passing Dumper=MyDumper raised TypeError; yaml.dump is the correct
        # entry point for a custom Dumper.
        self.process_config_file(
            '/lhost.yml',
            yaml.dump(lhost_yml_dict, default_flow_style=False,
                      Dumper=MyDumper))
        self.log_status()
        revision = self.wc.commit(
            msg='Updated lhost details of siteid:%s as part of %s on %s'
            % (site, 'FCO12345678', datetime.now()))  # FCO12345678
        if revision:
            log.info('revision: %s', str(revision))
        else:
            log.debug('No changes, nothing to commit')
def change_values_for_one_site(self, site, new_value_for_keyname1, new_value_for_keyname2):
    """Store two encrypted shinken resources and reset the waiting
    thresholds in <site>/lhost.yml, then commit to SVN (FCO83000210).

    :param site: site id used to build the SVN path and the commit message
    :param new_value_for_keyname1: plaintext value encrypted into keyName1
    :param new_value_for_keyname2: plaintext suffix; the site id (uppercased)
        is prepended before encryption into keyName2
    """
    self.set_site_svn_lhost_path(site)
    self.set_subpath_siteid(site)
    with temporary.temp_dir(parent_dir='/tmp/') as tmp_work_dir:
        teml_yml_fl = str(tmp_work_dir) + '/{site}/lhost.yml'.format(site=site)
        self.set_work_dir(tmp_work_dir)
        self.wc.info()
        with open(teml_yml_fl) as scanner_conts:
            # safe_load: yaml.load without a Loader is deprecated/unsafe.
            lhost_yml_dict = yaml.safe_load(scanner_conts)
        resources = lhost_yml_dict.setdefault('shinken_resources', {})
        resources[self.keyName1] = self.encrypt(new_value_for_keyname1)
        resources[self.keyName2] = self.encrypt(
            site.upper() + new_value_for_keyname2)
        # Replace (not merge) the threshold block with the fixed waiting
        # thresholds — same effect as the original top-level dict.update().
        lhost_yml_dict['threshold_values'] = {
            'WAITING_WARNING': 3,
            'WAITING_CRITICAL': 1,
        }
        self.process_config_file(
            '/lhost.yml',
            yaml.safe_dump(lhost_yml_dict, default_flow_style=False))
        self.log_status()
        revision = self.wc.commit(
            msg='Updated lhost details of siteid:%s as part of %s on %s'
            % (site, 'FCO83000210', datetime.now()))  # FCO83000210
        if revision:
            log.info('revision: %s', str(revision))
        else:
            log.debug('No changes, nothing to commit')
def test_temp_dir_deletes_all_children():
    """Exiting the context removes the dir and everything nested inside."""
    with temporary.temp_dir() as temp_dir:
        nested = temp_dir / 'deep/deeper/file'
        create_file_in_tree(nested)
        assert nested.is_file()
    assert not temp_dir.exists()
    assert not nested.exists()
def test_temp_dir_without_chdir_creates_temp_dir():
    """Without make_cwd the working directory is never changed."""
    original_cwd = os.getcwd()
    with temporary.temp_dir() as temp_dir:
        assert temp_dir.is_dir()
        assert original_cwd == os.getcwd()
    assert not temp_dir.exists()
    assert original_cwd == os.getcwd()
def test_temp_dir_passes_through_mkdtemp_args(master_mock):
    """Positional args are forwarded verbatim to tempfile.mkdtemp."""
    forwarded = ('suffix', 'prefix', 'parent_dir')
    master_mock.mkdtemp.side_effect = (DummyException(),)
    try:
        with temporary.temp_dir(*forwarded):
            pass  # pragma: no cover
    except DummyException:
        master_mock.mkdtemp.assert_called_once_with(*forwarded)
        raise
def test_temp_dir_with_failed_rmtree(master_mock):
    """If rmtree fails, the directory survives and the caller can remove it."""
    master_mock.rmtree.side_effect = (OSError(-1, 'Fake'),)
    surviving_dir = None
    try:
        with temporary.temp_dir() as surviving_dir:
            pass
    finally:
        assert surviving_dir.is_dir()
        surviving_dir.rmdir()
def configure(pytestconfig, config_dict):
    """Build a disposable AiiDA test environment and tear it down afterwards.

    Creates a temporary repository directory and a temporary PostgreSQL
    instance, runs the project setup against them, then populates computers,
    codes and pseudo-potential families from ``config_dict``.  Generator
    fixture: everything after ``yield`` is teardown.

    :param pytestconfig: pytest config; ``--quiet-wipe`` skips the prompt
    :param config_dict: mapping with optional 'computers', 'codes' and
        'pseudo_families' sections (each a name -> kwargs mapping)
    """
    # Work on a copy so callers' config_dict is never mutated.
    config = copy.deepcopy(config_dict)
    with temporary.temp_dir() as td, PGTest(max_connections=100) as pgt:
        with reset_after_run():
            # Local import: must not touch the AiiDA profile machinery
            # before this fixture controls the environment.
            from ._setup import run_setup
            # Silence run_setup's stdout chatter.
            with open(os.devnull, 'w') as devnull, redirect_stdout(devnull):
                run_setup(
                    profile='test_profile',
                    db_user='******',
                    db_port=pgt.port,
                    db_name='postgres',
                    db_pass='',
                    repo=str(td))
            from ._computer import setup_computer
            computers = config.get('computers', {})
            for name, kwargs in computers.items():
                # 'queue_name' is not a setup_computer argument; it is
                # presumably consumed elsewhere — TODO confirm.
                setup_computer(
                    name=name,
                    **{k: v for k, v in kwargs.items() if k != 'queue_name'})
            from ._code import setup_code
            codes = config.get('codes', {})
            for label, kwargs in codes.items():
                setup_code(label=label, **kwargs)
            # Set up the test pseudo-potential families with the same pattern.
            from ._pseudo_family import setup_pseudo_family
            pseudo_families = config.get('pseudo_families', {})
            for group_name, kwargs in pseudo_families.items():
                setup_pseudo_family(group_name=group_name, **kwargs)
            # Hand control to the tests; teardown resumes below.
            yield
            if not pytestconfig.option.quiet_wipe:
                capture_manager = pytest.config.pluginmanager.getplugin(
                    'capturemanager')
                # Handle compatibility break in pytest: the capture-manager
                # method names changed across versions, so probe both.
                init = getattr(
                    capture_manager,
                    'init_capturings',
                    getattr(capture_manager, 'start_global_capturing', None))
                suspend = getattr(
                    capture_manager,
                    'suspendcapture',
                    getattr(capture_manager, 'suspend_global_capture', None))
                resume = getattr(
                    capture_manager,
                    'resumecapture',
                    getattr(capture_manager, 'resume_global_capture', None))
                try:
                    init()
                except AssertionError:
                    # Capturing may already be initialised; that is fine.
                    pass
                # Release the console so the operator prompt is visible.
                suspend(in_=True)
                raw_input(
                    "\nTests finished. Press enter to wipe the test AiiDA environment."
                )
                resume()
def test_temp_file_with_failed_remove():
    """temp_file() must tolerate the cleanup unlink failing with OSError."""
    @simian.patch(temporary.files, external=('pathlib2.Path', ))
    def blow_up(master_mock):
        # Make the patched Path.unlink raise during cleanup.
        master_mock.Path.return_value.unlink.side_effect = (OSError(
            -1, 'Fake'), )
        # NOTE: 'parent_dir' resolves via closure to the name bound by the
        # outer 'with' below, at call time — not at definition time.
        with temporary.temp_file(parent_dir=parent_dir):
            pass

    with temporary.temp_dir() as parent_dir:
        blow_up()  # pylint: disable=no-value-for-parameter
def change_values_for_one_site(self, site, new_value_for_keyname1, new_value_for_keyname2):
    """Refresh encrypted shinken credentials and normalise '$user' to
    '$USER' in the endpoint '$resource' references of <site>/lhost.yml,
    then commit the result to SVN (ticket FCO12345678).

    :param site: site id used to build the SVN path and the commit message
    :param new_value_for_keyname1: plaintext value encrypted into keyName1
    :param new_value_for_keyname2: plaintext suffix; the site id (uppercased)
        is prepended before encryption into keyName2
    """
    self.set_site_svn_lhost_path(site)
    self.set_subpath_siteid(site)
    with temporary.temp_dir(parent_dir='/tmp/') as tmp_work_dir:
        teml_yml_fl = str(tmp_work_dir) + '/{site}/lhost.yml'.format(site=site)
        self.set_work_dir(tmp_work_dir)
        self.wc.info()
        with open(teml_yml_fl) as scanner_conts:
            # safe_load: yaml.load without a Loader is deprecated/unsafe.
            lhost_yml_dict = yaml.safe_load(scanner_conts)
        resources = lhost_yml_dict.setdefault('shinken_resources', {})
        # QuotedString keeps the encrypted blobs quoted in the dumped YAML.
        resources[self.keyName1] = QuotedString(
            self.encrypt(new_value_for_keyname1))
        resources[self.keyName2] = QuotedString(
            self.encrypt(site.upper() + new_value_for_keyname2))
        endpoints = lhost_yml_dict.setdefault('endpoints', {})
        # Update the username and password '$user$' tokens to uppercase.
        # Compiled once; raw string so '\$' is a literal escape, not a
        # string escape.  (?:...)+ collapses repeated '$user' runs.
        usr_pattern = re.compile(r'(\$user)+', flags=re.I)
        for endpoint in endpoints:
            # keyName3/keyName4 are presumably the user/password keys of
            # each endpoint mapping — TODO confirm against callers.
            for key in (self.keyName3, self.keyName4):
                entry = endpoint.get(key)
                # isinstance instead of 'type(...) is dict'; .get() also
                # tolerates endpoints that lack one of the keys.
                if isinstance(entry, dict):
                    entry['$resource'] = usr_pattern.sub(
                        '$USER', entry['$resource'])
        self.process_config_file(
            '/lhost.yml',
            yaml.dump(lhost_yml_dict, default_flow_style=False,
                      Dumper=MyDumper))
        self.log_status()
        revision = self.wc.commit(
            msg='Updated lhost details of siteid:%s as part of %s on %s'
            % (site, 'FCO12345678', datetime.now()))  # FCO12345678
        if revision:
            log.info('revision: %s', str(revision))
        else:
            log.debug('No changes, nothing to commit')
def create_standalone_instance(ctx, name, zone, machine_type, address, bot_config, stackdriver_logging):
    """Create a standalone container-vm bot instance with gcloud.

    Builds the ``gcloud compute instances create`` command line, attaches
    bot-config key/value pairs as instance metadata, optionally installs the
    Stackdriver logging agent via a startup script, asks for confirmation,
    then runs the command.

    :param ctx: click context, used to invoke copy_fluentd_conf afterwards
    :param name: instance name
    :param zone: GCE zone
    :param machine_type: GCE machine type
    :param address: optional static external address
    :param bot_config: path to the bot config file parsed for metadata
    :param stackdriver_logging: if truthy, install the logging agent and
        copy the fluentd config to the instance
    """
    args = [
        'gcloud', 'compute', 'instances', 'create', name,
        '--image', 'container-vm',
        '--zone', zone,
        '--machine-type', machine_type,
    ]
    if address:
        args.append('--address')
        args.append(address)
    with open(bot_config) as f:
        bot_config_content = ConfigFileParser().parse(f)
    # BUGFIX: secret_args was only bound when the parsed config was
    # non-empty, so subprocess.call(args + secret_args) raised NameError
    # for an empty config.  Default to an empty list.
    secret_args = []
    if bot_config_content:
        secret_args = [
            '--metadata',
            ','.join('='.join(item) for item in bot_config_content.items()),
        ]
    with temp_dir() as d:
        # add metadata from file
        args.append('--metadata-from-file')
        metadata_files = [
            'google-container-manifest=etc/standalone-bot-containers.yaml'
        ]
        startup_script_path = os.path.join(d, 'startup-script.sh')
        if stackdriver_logging:
            # Fetch the agent installer, then extend it to create the bot
            # log directory the fluentd config expects.
            urllib.urlretrieve(LOGGING_AGENT_INSTALL_SCRIPT,
                               startup_script_path)
            with open(startup_script_path, 'a') as f:
                f.write('\nmkdir -p /var/log/bot\n')
            metadata_files.append(
                'startup-script={}'.format(startup_script_path))
        args.append(','.join(metadata_files))
        # secret metadata is kept out of the confirmation text on purpose.
        confirm = "Create the following instance? (+{num} metadata from {config})\n{command}".format(
            num=len(bot_config_content),
            config=bot_config,
            command=' '.join(args))
        click.confirm(confirm, abort=True)
        subprocess.call(args + secret_args)
        if stackdriver_logging:
            ctx.invoke(copy_fluentd_conf, name=name, zone=zone)
def create_instance(ctx, name, zone, machine_type, address, bot_config, stackdriver_logging):
    """Create a container-vm bot instance with gcloud.

    Builds the ``gcloud compute instances create`` command line, attaches
    bot-config key/value pairs as instance metadata, optionally installs the
    Stackdriver logging agent via a startup script, asks for confirmation,
    then runs the command.

    :param ctx: click context, used to invoke copy_fluentd_conf afterwards
    :param name: instance name
    :param zone: GCE zone
    :param machine_type: GCE machine type
    :param address: optional static external address
    :param bot_config: path to the bot config file parsed for metadata
    :param stackdriver_logging: if truthy, install the logging agent and
        copy the fluentd config to the instance
    """
    args = [
        'gcloud', 'compute', 'instances', 'create', name,
        '--image', 'container-vm',
        '--zone', zone,
        '--machine-type', machine_type,
    ]
    if address:
        args.append('--address')
        args.append(address)
    with open(bot_config) as f:
        bot_config_content = ConfigFileParser().parse(f)
    # BUGFIX: secret_args was only bound when the parsed config was
    # non-empty, so subprocess.call(args + secret_args) raised NameError
    # for an empty config.  Default to an empty list.
    secret_args = []
    if bot_config_content:
        secret_args = [
            '--metadata',
            ','.join('='.join(item) for item in bot_config_content.items()),
        ]
    with temp_dir() as d:
        # add metadata from file
        args.append('--metadata-from-file')
        metadata_files = ['google-container-manifest=etc/containers.yaml']
        startup_script_path = os.path.join(d, 'startup-script.sh')
        if stackdriver_logging:
            # Fetch the agent installer, then extend it to create the bot
            # log directory the fluentd config expects.
            urllib.urlretrieve(LOGGING_AGENT_INSTALL_SCRIPT,
                               startup_script_path)
            with open(startup_script_path, 'a') as f:
                f.write('\nmkdir -p /var/log/bot\n')
            metadata_files.append(
                'startup-script={}'.format(startup_script_path))
        args.append(','.join(metadata_files))
        # secret metadata is kept out of the confirmation text on purpose.
        confirm = "Create the following instance? (+{num} metadata from {config})\n{command}".format(
            num=len(bot_config_content),
            config=bot_config,
            command=' '.join(args))
        click.confirm(confirm, abort=True)
        subprocess.call(args + secret_args)
        if stackdriver_logging:
            ctx.invoke(copy_fluentd_conf, name=name, zone=zone)
def test_manually_deleting_temp_dir_is_allowed():
    """Removing the directory inside the block must not break cleanup."""
    with temporary.temp_dir() as doomed_dir:
        doomed_dir.rmdir()
def test_changing_to_temp_dir_manually_still_allows_deletion():
    """Cleanup succeeds even when the cwd is the temp dir itself."""
    with temporary.temp_dir() as destination:
        os.chdir(str(destination))
def test_temp_file_in_custom_parent_dir():
    """A temp file is created directly inside the requested parent dir."""
    # Single with-statement; exit order (file first, then dir) is unchanged.
    with temporary.temp_dir() as parent_dir, \
            temporary.temp_file(parent_dir=parent_dir) as temp_file:
        assert temp_file.parent == parent_dir