def _ensure_valid_user_vo_file(self):
    """Ensure the user-vo-map file exists, is valid, and is non-empty.

    If the file at USER_VO_MAP_LOCATION is already valid and lists at
    least one VO, do nothing.  Otherwise try to (re)generate it:

    - GUMS ('xacml' authorization): run gums-host-cron, falling back to
      the GUMS JSON interface if that fails.
    - Otherwise: run edg-mkgridmap.

    Returns:
        True if a valid, non-empty user-vo-map is in place, False otherwise.
    """
    using_gums = self.authorization_method == 'xacml'
    if not (validation.valid_user_vo_file(USER_VO_MAP_LOCATION)
            and utilities.get_vos(USER_VO_MAP_LOCATION)):
        self.log("Trying to create user-vo-map file", level=logging.INFO)
        result = False
        if using_gums:
            sys.stdout.write("Querying GUMS server. This may take some time\n")
            sys.stdout.flush()
            result = utilities.run_script(['/usr/bin/gums-host-cron'])
            if not result:
                # gums-host-cron failed, let's try the json interface
                try:
                    sys.stdout.write(
                        "Querying GUMS server via JSON interface. This may take some time\n")
                    sys.stdout.flush()
                    user_vo_file_text = gums_supported_vos.gums_json_user_vo_map_file(
                        self.gums_host)
                    # Use a context manager so the file is closed (and its
                    # buffers flushed) even if write() raises -- the original
                    # code leaked the open file handle.
                    with open(USER_VO_MAP_LOCATION, "w") as user_vo_file:
                        user_vo_file.write(user_vo_file_text)
                    return True
                except exceptions.Error as e:
                    self.log(
                        "Could not query GUMS server via JSON interface: %s" % e,
                        level=logging.WARNING)
        else:
            sys.stdout.write(
                "Running edg-mkgridmap, this process may take some time to query vo servers\n")
            sys.stdout.flush()
            result = utilities.run_script(['/usr/sbin/edg-mkgridmap'])
        # Re-validate whatever the generator produced; the second return
        # value is the list of lines that failed validation.
        is_valid, invalid_lines = validation.valid_user_vo_file(
            USER_VO_MAP_LOCATION, True)
        result = result and is_valid
        if not result:
            if not invalid_lines:
                self.log(
                    "Empty %s generated, please check the GUMS configuration (if using GUMS), "
                    "the edg-mkgridmap configuration (if not using GUMS), and/or log messages for the above"
                    % USER_VO_MAP_LOCATION,
                    level=logging.WARNING)
            else:
                self.log("Invalid lines in user-vo-map file:",
                         level=logging.WARNING)
                self.log("\n".join(invalid_lines), level=logging.WARNING)
            self.log("Error creating user-vo-map file", level=logging.WARNING)
            return False
        else:
            return True
def _enable_metrics(self, host, metrics, args=None):
    """Given a host and array of metrics, enable them via rsv-control

    :param host: FQDN of host to enable metrics for
    :type host: str
    :param metrics: list of metrics to enable
    :type metrics: list
    :param args: extra arguments to rsv-control
    :type args: list or None
    :raise ConfigFailed: if rsv-control fails
    """
    # Nothing to do without metrics.
    if not metrics:
        return
    # Normalize here instead of using [] as the default argument, which
    # would be shared between calls.
    extra_args = args or []
    command = [self.rsv_control, "-v0", "--enable", "--host", host]
    command += extra_args
    command += metrics
    if utilities.run_script(command):
        return
    # rsv-control failed: report the full context, then abort configuration.
    for message in ("ERROR: Attempt to enable metrics via rsv-control failed",
                    "Host: %s" % host,
                    "Metrics: %s" % " ".join(metrics)):
        self.log(message, level=logging.ERROR)
    raise exceptions.ConfigureError
def _configure_consumers(self):
    """Enable the appropriate RSV consumers.

    Policy:
    - html-consumer is ALWAYS enabled when consumers are installed
    - gratia-consumer is NEVER enabled (its upstream service was shut down)
    - nagios-consumer is enabled iff enable_nagios is True
    - zabbix-consumer is enabled iff enable_zabbix is True and
      rsv-consumers-zabbix is installed
    """
    enabled = ["html-consumer"]

    if self.opt_val("enable_gratia"):
        self.log(
            "Your configuration has enabled the Gratia consumer but the service which the Gratia consumer "
            "reports to has been shut down. Please turn off 'enable_gratia' in the RSV section. "
            "Gratia consumer configuration will be ignored.",
            level=logging.WARNING)

    if self.options['enable_nagios'].value:
        enabled.append("nagios-consumer")
        self._configure_nagios_files()

    if self.options['enable_zabbix'].value:
        if utilities.rpm_installed('rsv-consumers-zabbix'):
            enabled.append("zabbix-consumer")
            self._configure_zabbix_files()
        else:
            self.log(
                'Your configuration has enabled the Zabbix consumer '
                'but rsv-consumers-zabbix is not installed. Zabbix consumer configuration will be ignored.',
                level=logging.WARNING)

    self.log("Enabling consumers: %s " % " ".join(enabled))
    if not utilities.run_script([self.rsv_control, "-v0", "--enable"] + enabled):
        raise exceptions.ConfigureError
    # Best effort -- we don't care whether disabling gratia-consumer succeeds.
    utilities.run_script([self.rsv_control, "-v0", "--disable", "gratia-consumer"])
def create_user_vo_file(using_gums=False):
    """
    Check and create a mapfile if needed
    """
    map_file = '/var/lib/osg/user-vo-map'
    try:
        # Nothing to do when the existing mapfile already validates.
        if validation.valid_user_vo_file(map_file):
            return True
        # Pick the generator: gums-host-cron for GUMS, edg-mkgridmap otherwise.
        script = '/usr/bin/gums-host-cron' if using_gums else '/usr/sbin/edg-mkgridmap'
        sys.stdout.write("Running %s, this process may take some time "
                         "to query vo and gums servers\n" % script)
        sys.stdout.flush()
        if not utilities.run_script([script]):
            return False
    except IOError:
        return False
    return True
def set_default_jobmanager(self, default='fork'):
    """
    Set the default jobmanager

    Arguments:
    default - Indicates the default jobmanager, currently either
              'fork' or 'managed-fork'

    Returns:
    True if the default jobmanager was registered successfully,
    False otherwise
    """
    self.log("JobManager.set_default_jobmanager started")
    gatekeeper_admin = "/usr/sbin/globus-gatekeeper-admin"
    if not validation.valid_executable(gatekeeper_admin):
        self.log("%s not found. Ensure the Globus Gatekeeper is installed."
                 % gatekeeper_admin,
                 level=logging.ERROR)
        return False

    # Map the supported defaults to the globus jobmanager service name and
    # its announcement message.
    jobmanagers = {
        'fork': ('jobmanager-fork-poll',
                 "Setting regular fork manager to be the default jobmanager"),
        'managed-fork': ('jobmanager-managedfork',
                         "Setting managed fork manager to be the default jobmanager"),
    }
    if default not in jobmanagers:
        # NOTE: fixed typos in the original message ("jobamanger"/"jobmanger")
        self.log("Invalid jobmanager type specified as the default "
                 "jobmanager: %s" % default, level=logging.ERROR)
        return False

    jobmanager, announcement = jobmanagers[default]
    self.log(announcement)
    # Register the chosen jobmanager under both the 'jobmanager' and
    # 'jobmanager-fork' service aliases; both registrations must succeed.
    for alias in ('jobmanager', 'jobmanager-fork'):
        if not utilities.run_script([gatekeeper_admin, '-e', jobmanager,
                                     '-n', alias]):
            self.log("Could not set the %s to the default jobmanager"
                     % jobmanager,
                     level=logging.ERROR)
            return False

    self.log("JobManager.set_default_jobmanager completed")
    return True
try: contents = open(filename).read() except EnvironmentError, err: self.log(self.MISSING_JOBMANAGER_CONF_MSG % (filename, err), level=logging.ERROR) return False if '-seg-module' not in contents: contents = contents + '-seg-module ' + seg_module if utilities.atomic_write(filename, contents): return True else: self.log('Error enabling SEG in ' + filename, level=logging.ERROR) return False if not utilities.run_script([self.seg_admin_path, '-e', seg_module]): return False return True def disable_seg(self, seg_module, filename): """ Update the globus jobmanager configuration so that it does not allow use the SEG Returns: True if config successfully updated """ if filename is None or seg_module is None: return False if seg_module not in self.lrms:
def set_default_jobmanager(self, default='fork'):
    """
    Set the default jobmanager

    Arguments:
    default - Indicates the default jobmanger, currently either
              'fork' or 'managed-fork'
    """
    self.log("JobManager.set_default_jobmanager started")
    gatekeeper_admin = "/usr/sbin/globus-gatekeeper-admin"
    if not validation.valid_executable(gatekeeper_admin):
        self.log(
            "%s not found. Ensure the Globus Gatekeeper is installed." %
            gatekeeper_admin,
            level=logging.ERROR)
        return False

    # Table of supported defaults: jobmanager service name plus the
    # message announcing the choice.
    known = {
        'fork': ('jobmanager-fork-poll',
                 "Setting regular fork manager to be the default jobmanager"),
        'managed-fork': ('jobmanager-managedfork',
                         "Setting managed fork manager to be the default jobmanager"),
    }
    if default not in known:
        self.log("Invalid jobamanger type specified as the default " +
                 "jobmanger: %s" % default,
                 level=logging.ERROR)
        return False

    manager, announcement = known[default]
    self.log(announcement)
    # The selected jobmanager is installed under both service aliases;
    # a failure on either aborts.
    for alias in ('jobmanager', 'jobmanager-fork'):
        if not utilities.run_script([gatekeeper_admin, '-e', manager,
                                     '-n', alias]):
            self.log("Could not set the %s to the default jobmanager" %
                     manager,
                     level=logging.ERROR)
            return False

    self.log("JobManager.set_default_jobmanager completed")
    return True
contents = open(filename).read() except EnvironmentError, err: self.log(self.MISSING_JOBMANAGER_CONF_MSG % (filename, err), level=logging.ERROR) return False if '-seg-module' not in contents: contents = contents + '-seg-module ' + seg_module if utilities.atomic_write(filename, contents): return True else: self.log('Error enabling SEG in ' + filename, level=logging.ERROR) return False if not utilities.run_script([self.seg_admin_path, '-e', seg_module]): return False return True def disable_seg(self, seg_module, filename): """ Update the globus jobmanager configuration so that it does not allow use the SEG Returns: True if config successfully updated """ if filename is None or seg_module is None: return False if seg_module not in self.lrms: