def find_new_entries_in_infosys(config_xml, source, source_type, skip_disabled, vo_name=''):
    """
    For the given information system, find any new entries that are not
    already in the config.

    Args:
        config_xml: path to the factory config xml file
        source: information system source (url/hostname) to query
        source_type: information system type: 'bdii', 'ress' or 'tg'
        skip_disabled: passed through to parse_entries to skip disabled entries
        vo_name: optional VO name used to narrow the infosys query

    Returns:
        List of entry dicts found in the information system but absent
        from the config.
    """
    try:
        # Find all config entries not disabled
        config_dom = minidom.parse(config_xml)
        config_entries = infosys_lib.parse_entries(config_dom, skip_missing_ref_id=True, skip_disabled=skip_disabled)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
        print("Error parsing the config file '%s' for entries, exiting the tool." % config_xml)
        sys.exit(2)

    # Query the given info system.  Default to {} so an unrecognized
    # source_type yields no new entries instead of a NameError below.
    infosys_entries = {}
    if source_type.lower() == 'bdii':
        infosys_entries = infosys_lib.query_bdii(source, vo_name)
    elif source_type.lower() == 'ress':
        # Update path with condor
        condor_path = infosys_lib.parse_condor_path(config_dom)
        os.environ["CONDOR_CONFIG"] = condor_path + "/etc/condor_config"
        condorExe.set_path(condor_path + "/bin", condor_path + "/sbin")
        ress_entries = infosys_lib.query_ress(source, vo_name)
        # Remove duplicate entries
        infosys_entries = remove_duplicates(ress_entries)
    elif source_type.lower() == 'tg':
        infosys_entries = infosys_lib.query_teragrid()

    # Compare config entries with what is found in the information system
    new_entries = []
    for infosys_id in infosys_entries:
        entry_i = infosys_entries[infosys_id]
        found_match = False
        for config_entry in config_entries:
            entry_c = config_entries[config_entry]
            # Check if ids match
            if entry_i['ref_id'] == entry_c['ref_id']:
                # Check same source types between config and infosys entries
                # TODO do we need to check source url too?
                if entry_c['source_type'].lower() == source_type.lower():
                    found_match = True  # already have this entry
                    break
            else:
                # Check if content matches for other infosys or manual entries
                if entry_i['gatekeeper'] == entry_c['gatekeeper'] and entry_i['gridtype'] == entry_c['gridtype'] and entry_i['rsl'] == entry_c['rsl']:
                    found_match = True  # already have this entry
                    # TODO here could add ability to update ref_ids if find additional matching entry
                    # not sure if we want to for ress entries tho?
                    break
        if not found_match:
            new_entries.append(infosys_entries[infosys_id])

    return new_entries
def delete_ps_directories(self, dirs):
    """Delete the contents of directories with privilege separation in effect.

    Args:
        dirs: dict mapping a directory type label to its path.

    Most directories are removed directly; client logs/proxies must be
    emptied via condorPrivsep (condor_root_switchboard) because the
    factory create step requires them to be empty under privsep.
    """
    for dir_type in dirs.keys():  # renamed from 'type' to avoid shadowing the builtin
        if dir_type not in ["client logs", "client proxies", ]:
            common.remove_dir_path(dirs[dir_type])
            continue
        #-- Factory create requires client logs/proxies directories be empty
        #-- when privspep is in effect
        condor_sbin = "%s/sbin" % self.wms.condor_location()
        condor_bin = "%s/bin" % self.wms.condor_location()
        condorExe.set_path(condor_bin, condor_sbin)
        parent_dir = dirs[dir_type]
        subdirs = os.listdir(parent_dir)
        for base_dir in subdirs:
            if os.path.isdir("%s/%s" % (parent_dir, base_dir)):
                try:
                    condorPrivsep.rmtree(parent_dir, base_dir)
                except Exception as e:  # py3-compatible form of 'except Exception, e'
                    common.logerr("""Encountered a problem in executing condor_root_switchboard to remove this client's sub-directories: %(dir)s %(error)s Check your /etc/condor/privsep.conf file to verify. You may need to configure/install your WMS Collector to resolve or correct the ini file for the %(type)s attribute. Be careful now. """ % {"dir": parent_dir, "type": dir_type, "error": e, })
        common.logit("Files in %s deleted" % parent_dir)
def find_entries_with_different_content(config_xml, skip_disabled):
    """
    Find entries where their content doesn't match what is published in the
    information system.

    Args:
        config_xml: path to the factory config xml file
        skip_disabled: passed through to parse_entries to skip disabled entries

    Returns:
        Tuple of (bdii, ress, tg) id-matched entry collections as produced
        by find_entries_id_match.
    """
    try:
        # Find all enabled config entries with ref ids
        config_dom = minidom.parse(config_xml)
        config_entries = infosys_lib.parse_entries(config_dom, skip_missing_ref_id=True, skip_disabled=skip_disabled)

        # Create an info systems list from factory config
        infosystems = infosys_lib.parse_info_systems(config_dom)

        has_ress = False
        for infosys in infosystems:
            if infosystems[infosys].lower() == 'ress':
                has_ress = True
                break
        if has_ress:
            # Update path with condor
            condor_path = infosys_lib.parse_condor_path(config_dom)
            os.environ["CONDOR_CONFIG"] = condor_path + "/etc/condor_config"
            condorExe.set_path(condor_path + "/bin", condor_path + "/sbin")
    except Exception:
        # Narrowed from a bare except; also fixed "the the" typo in the message
        print("Error parsing the config file '%s', exiting the tool." % config_xml)
        sys.exit(2)

    # Retrieve info systems entries
    bdii_entries = {}
    ress_entries = {}
    tg_entries = {}
    # items() instead of py2-only iteritems(); infosys_type avoids shadowing builtin 'type'
    for infosys, infosys_type in infosystems.items():
        if infosys_type.lower() == 'bdii':
            bdii_entries.update(infosys_lib.query_bdii(infosys))
        elif infosys_type.lower() == 'ress':
            ress_entries.update(infosys_lib.query_ress(infosys))
        elif infosys_type.lower() == 'tg':
            tg_entries.update(infosys_lib.query_teragrid())

    id_match_bdii_entries = find_entries_id_match(bdii_entries, config_entries, 'bdii')
    id_match_ress_entries = find_entries_id_match(ress_entries, config_entries, 'ress')
    id_match_tg_entries = find_entries_id_match(tg_entries, config_entries, 'tg')

    return id_match_bdii_entries, id_match_ress_entries, id_match_tg_entries
def find_entries_with_ids_not_published(config_xml, skip_disabled):
    """
    Find config entries not published in the information systems.

    Args:
        config_xml: path to the factory config xml file
        skip_disabled: passed through to parse_entries to skip disabled entries

    Returns:
        Tuple of (bdii, ress, tg) not-found entry collections as produced
        by find_entries_id_not_found.
    """
    try:
        # Find all enabled config entries with ref ids
        config_dom = minidom.parse(config_xml)
        config_entries = infosys_lib.parse_entries(config_dom, skip_missing_ref_id=True, skip_disabled=skip_disabled)

        # Create an info systems list from factory config
        infosystems = infosys_lib.parse_info_systems(config_dom)

        has_ress = False
        for infosys in infosystems:
            if infosystems[infosys].lower() == 'ress':
                has_ress = True
                break
        if has_ress:
            # Update path with condor
            condor_path = infosys_lib.parse_condor_path(config_dom)
            os.environ["CONDOR_CONFIG"] = condor_path + "/etc/condor_config"
            condorExe.set_path(condor_path + "/bin", condor_path + "/sbin")
    except Exception:
        # Narrowed from a bare except; print() form works on py2 and py3;
        # also fixed "the the" typo in the message
        print("Error parsing the config file '%s', exiting the tool." % config_xml)
        sys.exit(2)

    # Retrieve info systems entries
    bdii_entries = {}
    ress_entries = {}
    tg_entries = {}
    # items() instead of py2-only iteritems(); infosys_type avoids shadowing builtin 'type'
    for infosys, infosys_type in infosystems.items():
        if infosys_type.lower() == 'bdii':
            bdii_entries.update(infosys_lib.query_bdii(infosys))
        elif infosys_type.lower() == 'ress':
            ress_entries.update(infosys_lib.query_ress(infosys))
        elif infosys_type.lower() == 'tg':
            tg_entries.update(infosys_lib.query_teragrid())

    id_not_found_bdii_entries = find_entries_id_not_found(bdii_entries, config_entries, 'bdii')
    id_not_found_ress_entries = find_entries_id_not_found(ress_entries, config_entries, 'ress')
    id_not_found_tg_entries = find_entries_id_not_found(tg_entries, config_entries, 'tg')

    return id_not_found_bdii_entries, id_not_found_ress_entries, id_not_found_tg_entries
def test_query_ress(self):
    """ Test querying RESS """
    # Condor path and config location
    # These will be set correctly as long as the test is run in the same environment
    # as what is needed to run the factory/wms collector
    if "CONDOR_CONFIG" not in os.environ:
        # Fixed: the default path was assigned to a dead local instead of
        # being exported, so condorExe.init() never saw it
        os.environ["CONDOR_CONFIG"] = "/etc/condor/condor_config"
    condorExe.init()
    self.assertTrue(condorExe.condor_bin_path is not None and condorExe.condor_sbin_path is not None)
    condorExe.set_path(condorExe.condor_bin_path, condorExe.condor_sbin_path)

    # Test that information is retrieved and is populated correctly
    infosys_entries = query_ress("osg-ress-1.fnal.gov", "engage")
    self.assertNotEqual(infosys_entries, {})
    # list() so indexing works on py3 dict_keys views too
    keys = list(infosys_entries.keys())
    entry = infosys_entries[keys[0]]
    self.assertTrue(entry['site_name'] != '')
    self.assertTrue(entry['gridtype'] != '')
    self.assertTrue(entry['gatekeeper'] != '')
    self.assertTrue(entry['wall_clocktime'] != 0)
    self.assertTrue(entry['ref_id'] != '')
    self.assertTrue(entry['ce_status'] != '')
    self.assertTrue(entry['glexec_bin'] == 'OSG')
    self.assertTrue(entry['work_dir'] == 'OSG')
    self.assertEqual(entry['source'], "osg-ress-1.fnal.gov")
    self.assertEqual(entry['source_type'], 'RESS')
    self.assertTrue(entry['GlueCEUniqueID'] != '')

    # Test bad ress source
    self.assertRaises(Exception, query_ress, "bad.url", "cms")

    # Test bad vo name
    infosys_entries = query_ress(
        "osg-ress-1.fnal.gov", "junk_testing_bad_vo_name_that_is_not_valid")
    self.assertEqual(infosys_entries, {})

    # Test empty vo name
    infosys_entries = query_ress("osg-ress-1.fnal.gov", "")
    self.assertTrue(infosys_entries != {})
def get_ress_data(self):
    """Query the ReSS collector for gatekeeper classads matching the VO constraint.

    Returns:
        The stored classad data fetched from the ReSS collector.
        Exits via common.logerr on validation or query failure.
    """
    common.logit("ReSS host: %s" % self.glidein.ress_host())
    #-- validate host ---
    if not common.url_is_valid(self.glidein.ress_host()):
        common.logerr("ReSS server (%s) in ress_host option is not valid or inaccssible." % self.glidein.ress_host())

    condor_sbin = "%s/sbin" % self.wms.condor_location()
    condor_bin = "%s/bin" % self.wms.condor_location()
    condorExe.set_path(condor_bin, condor_sbin)

    #-- get gatekeeper data from ReSS --
    common.logit("Supported VOs: %s" % self.glidein.entry_vos())
    constraint = self.glidein.ress_vo_constraint()
    common.logit("Constraints: %s" % constraint)

    condor_obj = condorMonitor.CondorStatus(pool_name=self.glidein.ress_host())
    try:
        condor_obj.load(constraint=constraint)
        condor_data = condor_obj.fetchStored()
    except Exception as e:  # py3-compatible form of 'except Exception, e'
        common.logerr(e)
    # Fixed: this variant dropped the fetched data on the floor — return it,
    # matching the sibling implementation of get_ress_data in this file's history
    del condor_obj
    return condor_data
def test_query_ress(self):
    """ Test querying RESS """
    # Condor path and config location
    # These will be set correctly as long as the test is run in the same environment
    # as what is needed to run the factory/wms collector
    # 'in' instead of the removed-in-py3 dict.has_key()
    if "CONDOR_CONFIG" not in os.environ:
        # Fixed: the default path was assigned to a dead local instead of
        # being exported, so condorExe.init() never saw it
        os.environ["CONDOR_CONFIG"] = "/etc/condor/condor_config"
    condorExe.init()
    self.assertTrue(condorExe.condor_bin_path is not None and condorExe.condor_sbin_path is not None)
    condorExe.set_path(condorExe.condor_bin_path, condorExe.condor_sbin_path)

    # Test that information is retrieved and is populated correctly
    infosys_entries = query_ress("osg-ress-1.fnal.gov", "engage")
    self.assertNotEqual(infosys_entries, {})
    # list() so indexing works on py3 dict_keys views too
    keys = list(infosys_entries.keys())
    entry = infosys_entries[keys[0]]
    self.assertTrue(entry['site_name'] != '')
    self.assertTrue(entry['gridtype'] != '')
    self.assertTrue(entry['gatekeeper'] != '')
    self.assertTrue(entry['wall_clocktime'] != 0)
    self.assertTrue(entry['ref_id'] != '')
    self.assertTrue(entry['ce_status'] != '')
    self.assertTrue(entry['glexec_bin'] == 'OSG')
    self.assertTrue(entry['work_dir'] == 'OSG')
    self.assertEqual(entry['source'], "osg-ress-1.fnal.gov")
    self.assertEqual(entry['source_type'], 'RESS')
    self.assertTrue(entry['GlueCEUniqueID'] != '')

    # Test bad ress source
    self.assertRaises(Exception, query_ress, "bad.url", "cms")

    # Test bad vo name
    infosys_entries = query_ress("osg-ress-1.fnal.gov", "junk_testing_bad_vo_name_that_is_not_valid")
    self.assertEqual(infosys_entries, {})

    # Test empty vo name
    infosys_entries = query_ress("osg-ress-1.fnal.gov", "")
    self.assertTrue(infosys_entries != {})
def get_ress_data(self):
    """Fetch gatekeeper classads from the ReSS collector, constrained by VO.

    Validates the configured ress_host, points condorExe at the WMS
    collector's condor binaries, then loads and returns the stored
    classad data from the ReSS collector.
    """
    ress_host = self.glidein.ress_host()
    common.logit("ReSS host: %s" % ress_host)
    #-- validate host ---
    if not common.url_is_valid(ress_host):
        common.logerr("ReSS server (%s) in ress_host option is not valid or inaccssible." % ress_host)

    # Point condorExe at the WMS collector's condor install
    sbin_dir = "%s/sbin" % self.wms.condor_location()
    bin_dir = "%s/bin" % self.wms.condor_location()
    condorExe.set_path(bin_dir, sbin_dir)

    #-- get gatekeeper data from ReSS --
    common.logit("Supported VOs: %s" % self.glidein.entry_vos())
    vo_constraint = self.glidein.ress_vo_constraint()
    common.logit("Constraints: %s" % vo_constraint)

    status_query = condorMonitor.CondorStatus(pool_name=ress_host)
    try:
        status_query.load(constraint=vo_constraint)
        classad_data = status_query.fetchStored()
    except Exception as e:
        common.logerr(e)
    del status_query
    return classad_data
def find_new_entries_in_infosys(config_xml, source, source_type, skip_disabled, vo_name=''):
    """
    For the given information system, find any new entries that are not
    already in the config.

    Args:
        config_xml: path to the factory config xml file
        source: information system source (url/hostname) to query
        source_type: information system type: 'bdii', 'ress' or 'tg'
        skip_disabled: passed through to parse_entries to skip disabled entries
        vo_name: optional VO name used to narrow the infosys query

    Returns:
        List of entry dicts found in the information system but absent
        from the config.
    """
    try:
        # Find all config entries not disabled
        config_dom = minidom.parse(config_xml)
        config_entries = infosys_lib.parse_entries(config_dom, skip_missing_ref_id=True, skip_disabled=skip_disabled)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
        print("Error parsing the config file '%s' for entries, exiting the tool." % config_xml)
        sys.exit(2)

    # Query the given info system.  Default to {} so an unrecognized
    # source_type yields no new entries instead of a NameError below.
    infosys_entries = {}
    if source_type.lower() == 'bdii':
        infosys_entries = infosys_lib.query_bdii(source, vo_name)
    elif source_type.lower() == 'ress':
        # Update path with condor
        condor_path = infosys_lib.parse_condor_path(config_dom)
        os.environ["CONDOR_CONFIG"] = condor_path + "/etc/condor_config"
        condorExe.set_path(condor_path + "/bin", condor_path + "/sbin")
        ress_entries = infosys_lib.query_ress(source, vo_name)
        # Remove duplicate entries
        infosys_entries = remove_duplicates(ress_entries)
    elif source_type.lower() == 'tg':
        infosys_entries = infosys_lib.query_teragrid()

    # Compare config entries with what is found in the information system
    new_entries = []
    for infosys_id in infosys_entries:
        entry_i = infosys_entries[infosys_id]
        found_match = False
        for config_entry in config_entries:
            entry_c = config_entries[config_entry]
            # Check if ids match
            if entry_i['ref_id'] == entry_c['ref_id']:
                # Check same source types between config and infosys entries
                # TODO do we need to check source url too?
                if entry_c['source_type'].lower() == source_type.lower():
                    found_match = True  # already have this entry
                    break
            else:
                # Check if content matches for other infosys or manual entries
                if entry_i['gatekeeper'] == entry_c['gatekeeper'] and entry_i['gridtype'] == entry_c['gridtype'] and entry_i['rsl'] == entry_c['rsl']:
                    found_match = True  # already have this entry
                    # TODO here could add ability to update ref_ids if find additional matching entry
                    # not sure if we want to for ress entries tho?
                    break
        if not found_match:
            new_entries.append(infosys_entries[infosys_id])

    return new_entries