Ejemplo n.º 1
0
 def test_safe_boolcomp(self):
     """safe_boolcomp must treat a bool or its string name ("True"/"False")
     as matching the expected bool, and anything else as a mismatch."""
     matching = [("True", True), (True, True), ("False", False), (False, False)]
     mismatched = [("True", False), (True, False), ("False", True), (False, True),
                   ("foo", True), ("foo", False)]
     for value, expected in matching:
         self.assertTrue(safe_boolcomp(value, expected))
     for value, expected in mismatched:
         self.assertFalse(safe_boolcomp(value, expected))
Ejemplo n.º 2
0
    def derive(self):
        """Validate the factory configuration and derive working directories.

        Validates glidein/factory/frontend/entry/attribute names, derives the
        stage, monitor, submit, log and web directories, and populates the
        per-username client log/proxy directory maps.

        Raises:
            RuntimeError: if any name or identity is missing/invalid, or if
                factory_collector is set to the reserved keyword "default".
        """
        # glidein name does not have a reasonable default
        if self.glidein_name is None:
            raise RuntimeError("Missing glidein name")
        if not cWParams.is_valid_name(self.glidein_name):
            raise RuntimeError("Invalid glidein name '%s'"%self.glidein_name)

        # "default" is reserved and may not be used as a collector name
        if self.factory_collector=="default":
            raise RuntimeError('"default" is a reserved keyword, cannot be used as factory_collector')

        # 'factory_versioning' may be a bool or a string like "True";
        # safe_boolcomp handles both representations
        factoryVersioning = False
        if 'factory_versioning' in self.data and \
               safe_boolcomp(self.data['factory_versioning'], True):
            factoryVersioning = True

        # derive all working/publishing directories (versioned when enabled)
        self.stage_dir=self.buildDir(factoryVersioning, self.stage.base_dir)
        self.monitor_dir=self.buildDir(factoryVersioning, self.monitor.base_dir)
        self.submit_dir=self.buildDir(factoryVersioning, self.submit.base_dir)
        self.log_dir=self.buildDir(factoryVersioning, self.submit.base_log_dir)
        self.web_url=self.buildDir(factoryVersioning, self.stage.web_base_url)

        # per-frontend client directories, keyed by security-class username
        self.client_log_dirs={}
        self.client_proxies_dirs={}
        for fename in self.security.frontends.keys():
            if not cWParams.is_valid_name(fename):
                raise RuntimeError("Invalid frontend name '%s'"%fename)
            # identities containing spaces are rejected
            if ' ' in self.security.frontends[fename].identity:
                raise RuntimeError("Invalid frontend identity '%s'"%self.security.frontends[fename].identity)

            for scname in self.security.frontends[fename].security_classes.keys():
                username=self.security.frontends[fename].security_classes[scname].username
                # client dirs are always versioned (buildDir first arg is True)
                self.client_log_dirs[username]=self.buildDir(True, os.path.join(self.submit.base_client_log_dir, "user_%s"%username))
                self.client_proxies_dirs[username]=self.buildDir(True, os.path.join(self.submit.base_client_proxies_dir, "user_%s"%username))

        if not cWParams.is_valid_name(self.factory_name):
            raise RuntimeError("Invalid factory name '%s'"%self.factory_name)

        # validate entry names
        entry_names=self.entries.keys()
        for entry_name in entry_names:
            if not cWParams.is_valid_name(entry_name):
                raise RuntimeError("Invalid entry name '%s'"%entry_name)

        # validate global attribute names, then per-entry attribute names
        attr_names=self.attrs.keys()
        for attr_name in attr_names:
            if not cWParams.is_valid_name(attr_name):
                raise RuntimeError("Invalid global attribute name '%s'."%attr_name)
        for entry_name in entry_names:
            attr_names=self.entries[entry_name].attrs.keys()
            for attr_name in attr_names:
                if not cWParams.is_valid_name(attr_name):
                    raise RuntimeError("Invalid entry '%s' attribute name '%s'."%(entry_name, attr_name))
Ejemplo n.º 3
0
def main():
    """Main entrypoint: renew the frontend/pilot proxies configured in CONFIG.

    Reads the proxies configuration, validates the sections, then attempts to
    renew each proxy — via the real VOMS server or a locally faked VOMS AC —
    skipping proxies that still have enough lifetime left (per the per-section
    'frequency' option).

    Returns:
        int: 0 if every attempted renewal succeeded, 1 otherwise.

    Raises:
        ConfigError: if CONFIG lacks exactly one [COMMON] or any [PILOT*] section.
        RuntimeError: if the configured proxy owner is not a known system user.
    """
    config = ConfigParser.ConfigParser(DEFAULTS)
    config.read(CONFIG)
    proxies = config.sections()

    # Verify config sections
    if proxies.count('COMMON') != 1:
        raise ConfigError("there must be only one [COMMON] section in %s" % CONFIG)
    if len([x for x in proxies if x.startswith('PILOT')]) < 1:
        raise ConfigError("there must be at least one [PILOT] section in %s" % CONFIG)

    # Proxies need to be owned by the 'frontend' user
    try:
        fe_user = pwd.getpwnam(config.get('COMMON', 'owner'))
    except KeyError:  # pwd.getpwnam raises KeyError for unknown users
        raise RuntimeError("missing 'frontend' user")

    # Load VOMS Admin server info for case-sensitive VO name and for faking the VOMS Admin server URI
    vomses = os.getenv('VOMS_USERCONF', '/etc/vomses')
    with open(vomses, 'r') as _:
        vo_name_map, vo_uri_map = parse_vomses(_.read())

    retcode = 0
    # Proxy renewals
    proxies.remove('COMMON')  # no proxy renewal info in the COMMON section
    for proxy_section in proxies:
        proxy_config = dict(config.items(proxy_section))
        proxy = Proxy(proxy_config['proxy_cert'], proxy_config['proxy_key'],
                      proxy_config['output'], proxy_config['lifetime'],
                      fe_user.pw_uid, fe_user.pw_gid)

        # Users used to be able to control the frequency of the renewal when they were instructed to write their own
        # script and cronjob. Since the automatic proxy renewal cron/timer runs every hour, we allow the users to
        # control this via the 'frequency' config option. If more than 'frequency' hours have elapsed in a proxy's
        # lifetime, renew it. Otherwise, skip the renewal.
        def has_time_left(time_remaining):
            return int(proxy.lifetime)*3600 - time_remaining < int(proxy_config['frequency'])*3600

        if proxy_section == 'FRONTEND':
            if has_time_left(proxy.timeleft()):
                print('Skipping renewal of %s: time remaining within the specified frequency' % proxy.output)
                proxy.cleanup()
                continue
            stdout, stderr, client_rc = voms_proxy_init(proxy)
        elif proxy_section.startswith('PILOT'):
            # for pilots both the proxy lifetime and the VOMS AC lifetime must
            # be within the renewal window before we skip
            if has_time_left(proxy.timeleft()) and has_time_left(proxy.actimeleft()):
                print('Skipping renewal of %s: time remaining within the specified frequency' % proxy.output)
                proxy.cleanup()
                continue

            # map the (case-insensitive) configured VO name to its canonical form
            vo_attr = VO(vo_name_map[proxy_config['vo'].lower()], proxy_config['fqan'])

            if safe_boolcomp(proxy_config['use_voms_server'], True):
                stdout, stderr, client_rc = voms_proxy_init(proxy, vo_attr)
            else:
                # no VOMS server: fake the AC locally using the VO cert/key pair
                vo_attr.cert = proxy_config['vo_cert']
                vo_attr.key = proxy_config['vo_key']
                try:
                    vo_attr.uri = vo_uri_map[x509Support.extract_DN(vo_attr.cert)]
                except KeyError:
                    retcode = 1
                    print("ERROR: Failed to renew proxy {0}: ".format(proxy.output) +
                          "Could not find entry in {0} for {1}. ".format(vomses, vo_attr.cert) +
                          "Please verify your VO data installation.")
                    proxy.cleanup()
                    continue
                stdout, stderr, client_rc = voms_proxy_fake(proxy, vo_attr)
        else:
            # BUGFIX: report the offending section name (proxy_section), not the
            # Proxy object; the stderr message below already uses proxy_section
            print("WARNING: Unrecognized configuration section %s found in %s.\n" % (proxy_section, CONFIG) +
                  "Valid configuration sections: 'FRONTEND' or 'PILOT'.")
            client_rc = -1
            stderr = "Unrecognized configuration section '%s', renewal not attempted." % proxy_section
            stdout = ""

        if client_rc == 0:
            proxy.write()
            print("Renewed proxy from '%s' to '%s'." % (proxy.cert, proxy.output))
        else:
            retcode = 1
            # don't raise an exception here to continue renewing other proxies
            print("ERROR: Failed to renew proxy %s:\n%s%s" % (proxy.output, stdout, stderr))
            proxy.cleanup()

    return retcode
Ejemplo n.º 4
0
    def derive(self):
        """Validate the frontend configuration and derive working directories.

        Derives the stage/monitor/work/log/web locations, rejects reserved
        attributes, defaults missing security names/classes and verifies the
        high-availability settings.

        Raises:
            ValueError: if no groups are defined.
            RuntimeError: if a reserved attribute (GLIDEIN_Collector,
                GLIDEIN_CCB) is user-defined, security.proxy_DN or collectors
                are missing, or the HA configuration is inconsistent.
        """
        if len(self.groups.keys())==0:
            raise ValueError("No groups defined!")

        self.validate_names()

        # 'frontend_versioning' may be a bool or a string like "True"
        frontendVersioning = False
        if 'frontend_versioning' in self.data and \
               safe_boolcomp(self.data['frontend_versioning'], True):
            frontendVersioning = True
        self.stage_dir=self.buildDir(frontendVersioning, self.stage.base_dir)
        self.monitor_dir=self.buildDir(frontendVersioning, self.monitor.base_dir)
        self.work_dir=self.buildDir(frontendVersioning, self.work.base_dir)
        self.log_dir=self.buildDir(frontendVersioning, self.work.base_log_dir)
        self.web_url=self.buildDir(frontendVersioning, self.stage.web_base_url)
        if hasattr(self.monitor, "web_base_url") and (self.monitor.web_base_url is not None):
            self.monitoring_web_url=self.buildDir(frontendVersioning, self.monitor.web_base_url)
        else:
            self.monitoring_web_url=self.web_url.replace("stage", "monitor")

        self.derive_match_attrs()

        ####################
        # GLIDEIN_Collector is reserved: defined globally, or in every group,
        # counts as user-defined and is rejected
        has_collector='GLIDEIN_Collector' in self.attrs
        if not has_collector:
            # collector not defined at global level, must be defined in every group
            has_collector=True
            for  group_name in self.groups.keys():
                has_collector&='GLIDEIN_Collector' in self.groups[group_name].attrs

        if has_collector:
            raise RuntimeError("Attribute GLIDEIN_Collector cannot be defined by the user")

        ####################
        has_ccb='GLIDEIN_CCB' in self.attrs
        # BUGFIX: this used to test has_collector (always False here, since a
        # True value raised above), so a globally defined GLIDEIN_CCB missing
        # from any group cleared has_ccb and escaped the reserved check
        if not has_ccb:
            # CCB not defined at global level, must be defined in every group
            has_ccb=True
            for  group_name in self.groups.keys():
                has_ccb&='GLIDEIN_CCB' in self.groups[group_name].attrs

        if has_ccb:
            raise RuntimeError("Attribute GLIDEIN_CCB cannot be defined by the user")

        ####################
        if self.security.proxy_DN is None:
            raise RuntimeError("security.proxy_DN not defined")

        if len(self.collectors)==0:
            raise RuntimeError("At least one pool collector is needed")

        ####################
        has_security_name=(self.security.security_name is not None)
        if not has_security_name:
            # security_name not defined at global level, look if defined in every group
            has_security_name=True
            for  group_name in self.groups.keys():
                has_security_name&=(self.groups[group_name].security.security_name is not None)

        if not has_security_name:
            # explicity define one, so it will not change if config copied
            # it also makes the frontend admins aware of the name
            self.data['security']['security_name']=self.frontend_name

        ####################
        # default any credential without an explicit security class
        for i in range(len(self.security.credentials)):
            pel=self.subparams.data['security']['credentials'][i]
            if pel['security_class'] is None:
                # define an explicit security, so the admin is aware of it
                pel['security_class']="frontend"
        group_names=self.groups.keys()
        for group_name in group_names:
            for i in range(len(self.groups[group_name].security.credentials)):
                pel=self.subparams.data['groups'][group_name]['security']['credentials'][i]
                if pel['security_class'] is None:
                    # define an explicit security, so the admin is aware of it
                    pel['security_class']="group_%s"%group_name

        # verify and populate HA
        if safe_boolcomp(self.high_availability['enabled'], True):
            if (len(self.high_availability['ha_frontends']) == 1):
                haf = self.high_availability['ha_frontends'][0]
                if not haf['frontend_name']:
                    raise RuntimeError('High availability is enabled but the configuration is missing frontend_name of the master ha_frontend.')
            else:
                raise RuntimeError('Exactly one master ha_frontend information is needed when running this frontend in high_availability slave mode.')
Ejemplo n.º 5
0
    def derive(self):
        """Validate the frontend configuration and derive working directories.

        Derives the stage/monitor/work/log/web locations, rejects reserved
        attributes, defaults missing security names/classes and verifies the
        high-availability settings.

        Raises:
            ValueError: if no groups are defined.
            RuntimeError: if a reserved attribute (GLIDEIN_Collector,
                GLIDEIN_CCB) is user-defined, security.proxy_DN or collectors
                are missing, or the HA configuration is inconsistent.
        """
        if len(self.groups.keys()) == 0:
            raise ValueError("No groups defined!")

        self.validate_names()

        # 'frontend_versioning' may be a bool or a string like "True"
        frontendVersioning = False
        if 'frontend_versioning' in self.data and \
               safe_boolcomp(self.data['frontend_versioning'], True):
            frontendVersioning = True
        self.stage_dir = self.buildDir(frontendVersioning, self.stage.base_dir)
        self.monitor_dir = self.buildDir(frontendVersioning,
                                         self.monitor.base_dir)
        self.work_dir = self.buildDir(frontendVersioning, self.work.base_dir)
        self.log_dir = self.buildDir(frontendVersioning,
                                     self.work.base_log_dir)
        self.web_url = self.buildDir(frontendVersioning,
                                     self.stage.web_base_url)
        if hasattr(self.monitor, "web_base_url") and (self.monitor.web_base_url
                                                      is not None):
            self.monitoring_web_url = self.buildDir(frontendVersioning,
                                                    self.monitor.web_base_url)
        else:
            self.monitoring_web_url = self.web_url.replace("stage", "monitor")

        self.derive_match_attrs()

        ####################
        # GLIDEIN_Collector is reserved: defined globally, or in every group,
        # counts as user-defined and is rejected
        has_collector = 'GLIDEIN_Collector' in self.attrs
        if not has_collector:
            # collector not defined at global level, must be defined in every group
            has_collector = True
            for group_name in self.groups.keys():
                has_collector &= 'GLIDEIN_Collector' in self.groups[
                    group_name].attrs

        if has_collector:
            raise RuntimeError(
                "Attribute GLIDEIN_Collector cannot be defined by the user")

        ####################
        has_ccb = 'GLIDEIN_CCB' in self.attrs
        # BUGFIX: this used to test has_collector (always False here, since a
        # True value raised above), so a globally defined GLIDEIN_CCB missing
        # from any group cleared has_ccb and escaped the reserved check
        if not has_ccb:
            # CCB not defined at global level, must be defined in every group
            has_ccb = True
            for group_name in self.groups.keys():
                has_ccb &= 'GLIDEIN_CCB' in self.groups[group_name].attrs

        if has_ccb:
            raise RuntimeError(
                "Attribute GLIDEIN_CCB cannot be defined by the user")

        ####################
        if self.security.proxy_DN is None:
            raise RuntimeError("security.proxy_DN not defined")

        if len(self.collectors) == 0:
            raise RuntimeError("At least one pool collector is needed")

        ####################
        has_security_name = (self.security.security_name is not None)
        if not has_security_name:
            # security_name not defined at global level, look if defined in every group
            has_security_name = True
            for group_name in self.groups.keys():
                has_security_name &= (
                    self.groups[group_name].security.security_name is not None)

        if not has_security_name:
            # explicity define one, so it will not change if config copied
            # it also makes the frontend admins aware of the name
            self.data['security']['security_name'] = self.frontend_name

        ####################
        # default any credential without an explicit security class
        for i in range(len(self.security.credentials)):
            pel = self.subparams.data['security']['credentials'][i]
            if pel['security_class'] is None:
                # define an explicit security, so the admin is aware of it
                pel['security_class'] = "frontend"
        group_names = self.groups.keys()
        for group_name in group_names:
            for i in range(len(self.groups[group_name].security.credentials)):
                pel = self.subparams.data['groups'][group_name]['security'][
                    'credentials'][i]
                if pel['security_class'] is None:
                    # define an explicit security, so the admin is aware of it
                    pel['security_class'] = "group_%s" % group_name

        # verify and populate HA
        if safe_boolcomp(self.high_availability['enabled'], True):
            if (len(self.high_availability['ha_frontends']) == 1):
                haf = self.high_availability['ha_frontends'][0]
                if not haf['frontend_name']:
                    raise RuntimeError(
                        'High availability is enabled but the configuration is missing frontend_name of the master ha_frontend.'
                    )
            else:
                raise RuntimeError(
                    'Exactly one master ha_frontend information is needed when running this frontend in high_availability slave mode.'
                )
Ejemplo n.º 6
0
    def derive(self):
        """Validate the factory configuration and derive working directories.

        Validates glidein/factory/frontend/entry/attribute names, derives the
        stage, monitor, submit, log and web directories, and populates the
        per-username client log/proxy directory maps.

        Raises:
            RuntimeError: if any name or identity is missing/invalid, or if
                factory_collector is set to the reserved keyword "default".
        """
        # glidein name does not have a reasonable default
        if self.glidein_name is None:
            raise RuntimeError("Missing glidein name")
        if not cWParams.is_valid_name(self.glidein_name):
            raise RuntimeError("Invalid glidein name '%s'" % self.glidein_name)

        # "default" is reserved and may not be used as a collector name
        if self.factory_collector == "default":
            raise RuntimeError(
                '"default" is a reserved keyword, cannot be used as factory_collector'
            )

        # 'factory_versioning' may be a bool or a string like "True";
        # safe_boolcomp handles both representations
        factoryVersioning = False
        if 'factory_versioning' in self.data and \
               safe_boolcomp(self.data['factory_versioning'], True):
            factoryVersioning = True

        # derive all working/publishing directories (versioned when enabled)
        self.stage_dir = self.buildDir(factoryVersioning, self.stage.base_dir)
        self.monitor_dir = self.buildDir(factoryVersioning,
                                         self.monitor.base_dir)
        self.submit_dir = self.buildDir(factoryVersioning,
                                        self.submit.base_dir)
        self.log_dir = self.buildDir(factoryVersioning,
                                     self.submit.base_log_dir)
        self.web_url = self.buildDir(factoryVersioning,
                                     self.stage.web_base_url)

        # per-frontend client directories, keyed by security-class username
        self.client_log_dirs = {}
        self.client_proxies_dirs = {}
        for fename in self.security.frontends.keys():
            if not cWParams.is_valid_name(fename):
                raise RuntimeError("Invalid frontend name '%s'" % fename)
            # identities containing spaces are rejected
            if ' ' in self.security.frontends[fename].identity:
                raise RuntimeError("Invalid frontend identity '%s'" %
                                   self.security.frontends[fename].identity)

            for scname in self.security.frontends[
                    fename].security_classes.keys():
                username = self.security.frontends[fename].security_classes[
                    scname].username
                # client dirs are always versioned (buildDir first arg is True)
                self.client_log_dirs[username] = self.buildDir(
                    True,
                    os.path.join(self.submit.base_client_log_dir,
                                 "user_%s" % username))
                self.client_proxies_dirs[username] = self.buildDir(
                    True,
                    os.path.join(self.submit.base_client_proxies_dir,
                                 "user_%s" % username))

        if not cWParams.is_valid_name(self.factory_name):
            raise RuntimeError("Invalid factory name '%s'" % self.factory_name)

        # validate entry names
        entry_names = self.entries.keys()
        for entry_name in entry_names:
            if not cWParams.is_valid_name(entry_name):
                raise RuntimeError("Invalid entry name '%s'" % entry_name)

        # validate global attribute names, then per-entry attribute names
        attr_names = self.attrs.keys()
        for attr_name in attr_names:
            if not cWParams.is_valid_name(attr_name):
                raise RuntimeError("Invalid global attribute name '%s'." %
                                   attr_name)
        for entry_name in entry_names:
            attr_names = self.entries[entry_name].attrs.keys()
            for attr_name in attr_names:
                if not cWParams.is_valid_name(attr_name):
                    raise RuntimeError(
                        "Invalid entry '%s' attribute name '%s'." %
                        (entry_name, attr_name))
Ejemplo n.º 7
0
    def test_some_iterate_one_artifacts(self):
        """
        Mock our way into glideinFrontendElement:iterate_one() to test if
             glideinFrontendElement.glidein_dict['entry_point']['attrs']['GLIDEIN_REQUIRE_VOMS']
                and
             glideinFrontendElement.glidein_dict['entry_point']['attrs']['GLIDEIN_REQUIRE_GLEXEC_USE']
                and
             glideinFrontendElement.glidein_dict['entry_point']['attrs']['GLIDEIN_In_Downtime']

             are being evaluated correctly
        """

        self.gfe.stats = {'group': glideinFrontendMonitoring.groupStats()}
        self.gfe.published_frontend_name = '%s.XPVO_%s' % (
            self.gfe.frontend_name, self.gfe.group_name)
        mockery = mock.MagicMock()
        self.gfe.x509_proxy_plugin = mockery
        # keep logSupport.log.info in an array to search through later to
        # evaluate success
        glideinwms.frontend.glideinFrontendLib.logSupport.log = mockery
        mockery.info = log_info_side_effect

        # ForkManager mocked inside iterate_one, return data loaded from
        # fork_and_collect_side_effect
        # data loaded includes both legal True, False, 'True', 'False' , 'TRUE' etc
        # and obviously bad data 1, 0, etc

        with mock.patch.object(ForkManager,
                               'fork_and_collect',
                               return_value=fork_and_collect_side_effect()):
            with mock.patch.object(
                    ForkManager,
                    'bounded_fork_and_collect',
                    return_value=bounded_fork_and_collect_side_effect()):
                # also need to mock advertisers so they don't fork off jobs
                # it has nothing to do with what is being tested here
                with mock.patch.object(glideinFrontendInterface,
                                       'MultiAdvertizeWork'):
                    with mock.patch(
                            'glideinFrontendInterface.ResourceClassadAdvertiser.advertiseAllClassads',
                            return_value=None):
                        with mock.patch.object(glideinFrontendInterface,
                                               'ResourceClassadAdvertiser'):
                            # finally run iterate_one and collect the log data
                            self.gfe.iterate_one()

        # go through glideinFrontendElement data structures
        # collecting data to match against log output
        glideid_list = sorted(
            self.gfe.condorq_dict_types['Idle']['count'].keys())
        glideids = []
        in_downtime = {}
        req_voms = {}
        req_glexec = {}
        for elm in glideid_list:
            # skip entries whose first element is falsy (e.g. the total row)
            if elm and elm[0]:
                # glideid is rendered as "<name>@<host>" to match the log lines
                glideid_str = "%s@%s" % (str(elm[1]), str(elm[0]))
                gdata = self.gfe.glidein_dict[elm]['attrs']
                glideids.append(glideid_str)
                # safe_boolcomp normalizes bool/'True'/'TRUE' style values;
                # anything unrecognized (1, 0, 'foo') compares False
                in_downtime[glideid_str] = safe_boolcomp(
                    gdata.get('GLIDEIN_In_Downtime'), True)
                req_voms[glideid_str] = safe_boolcomp(
                    gdata.get('GLIDEIN_REQUIRE_VOMS'), True)
                req_glexec[glideid_str] = safe_boolcomp(
                    gdata.get('GLIDEIN_REQUIRE_GLEXEC_USE'), True)

        # run through the info log
        # if GLIDEIN_REQUIRE_VOMS was set to True, 'True', 'tRUE' etc for an entry:
        #    'Voms Proxy Required,' will appear in previous line of log
        # elif GLIDEIN_REQUIRE_GLEXEC_USE was set:
        #     'Proxy required (GLEXEC)' will appear in log
        idx = 0
        for lgln in LOG_DATA:
            # log line layout: ... <Up|Down> <glideid>
            parts = lgln.split()
            gid = parts[-1]
            if gid in glideids:
                upordown = parts[-2]
                # build a diagnostic string used in assertion messages below
                fmt_str = "glideid:%s in_downtime:%s req_voms:%s "
                fmt_str += "req_glexec:%s\nlog_data:%s"
                state = fmt_str % (gid, in_downtime[gid], req_voms[gid],
                                   req_glexec[gid], LOG_DATA[idx - 1])
                if self.debug_output:
                    print('%s' % state)
                use_voms = req_voms[gid]
                use_glexec = req_glexec[gid]

                # downtime flag must match the Up/Down token in the log
                if in_downtime[gid]:
                    self.assertTrue(upordown == 'Down',
                                    "%s logs this as %s" % (gid, upordown))
                else:
                    self.assertTrue(upordown == 'Up',
                                    "%s logs this as %s" % (gid, upordown))

                # VOMS requirement wins over GLEXEC; GLEXEC is only checked
                # when VOMS is not required
                if use_voms:
                    self.assertTrue(
                        'Voms proxy required,' in LOG_DATA[idx - 1], state)
                else:
                    self.assertFalse(
                        'Voms proxy required,' in LOG_DATA[idx - 1], state)
                    if use_glexec:
                        self.assertTrue(
                            'Proxy required (GLEXEC)' in LOG_DATA[idx - 1],
                            state)
                    else:
                        self.assertFalse(
                            'Proxy required (GLEXEC)' in LOG_DATA[idx - 1],
                            state)

            idx += 1
Ejemplo n.º 8
0
def main():
    """Main entrypoint: renew the frontend/pilot proxies configured in CONFIG.

    Reads the proxies configuration, validates the sections, parses the vomses
    file, then attempts to renew each proxy — via the real VOMS server or a
    locally faked VOMS AC — skipping proxies that still have enough lifetime
    left (per the per-section 'frequency' option).

    Returns:
        int: 0 if every attempted renewal succeeded, 1 otherwise.

    Raises:
        ConfigError: if CONFIG lacks exactly one [COMMON] or any [PILOT*] section.
        RuntimeError: if the configured proxy owner is not a known system user.
    """
    config = ConfigParser.ConfigParser(DEFAULTS)
    config.read(CONFIG)
    proxies = config.sections()

    # Verify config sections
    if proxies.count('COMMON') != 1:
        raise ConfigError("there must be only one [COMMON] section in %s" % CONFIG)
    if len([x for x in proxies if x.startswith('PILOT')]) < 1:
        raise ConfigError("there must be at least one [PILOT] section in %s" % CONFIG)

    # Proxies need to be owned by the 'frontend' user
    try:
        fe_user = pwd.getpwnam(config.get('COMMON', 'owner'))
    except KeyError:  # pwd.getpwnam raises KeyError for unknown users
        raise RuntimeError("missing 'frontend' user")

    # Load VOMS Admin server info for case-sensitive VO name and for faking the VOMS Admin server URI
    vomses = os.getenv('VOMS_USERCONF', '/etc/vomses')
    with open(vomses, 'r') as _:
        # "<VO ALIAS> " "<VOMS ADMIN HOSTNAME>" "<VOMS ADMIN PORT>" "<VOMS CERT DN>" "<VO NAME>"
        # "osg" "voms.grid.iu.edu" "15027" "/DC=org/DC=opensciencegrid/O=Open Science Grid/OU=Services/CN=voms.grid.iu.edu" "osg"
        vo_info = re.findall(r'"\w+"\s+"([^"]+)"\s+"(\d+)"\s+"([^"]+)"\s+"(\w+)"', _.read(), re.IGNORECASE)
        # VO names are case-sensitive but we don't expect users to get the case right in the proxies.ini
        vo_name_map = dict([(vo[3].lower(), vo[3]) for vo in vo_info])
        # A mapping between VO certificate subject DNs and VOMS URI of the form "<HOSTNAME>:<PORT>"
        # We had to separate this out from the VO name because a VO could have multiple vomses entries
        vo_uri_map = dict([(vo[2], vo[0] + ':' + vo[1]) for vo in vo_info])

    retcode = 0
    # Proxy renewals
    proxies.remove('COMMON')  # no proxy renewal info in the COMMON section
    for proxy_section in proxies:
        proxy_config = dict(config.items(proxy_section))
        proxy = Proxy(proxy_config['proxy_cert'], proxy_config['proxy_key'],
                      proxy_config['output'], proxy_config['lifetime'],
                      fe_user.pw_uid, fe_user.pw_gid)

        # Users used to be able to control the frequency of the renewal when they were instructed to write their own
        # script and cronjob. Since the automatic proxy renewal cron/timer runs every hour, we allow the users to
        # control this via the 'frequency' config option. If more than 'frequency' hours have elapsed in a proxy's
        # lifetime, renew it. Otherwise, skip the renewal.
        def has_time_left(time_remaining):
            return int(proxy.lifetime)*3600 - time_remaining < int(proxy_config['frequency'])*3600

        if proxy_section == 'FRONTEND':
            if has_time_left(proxy.timeleft()):
                print('Skipping renewal of %s: time remaining within the specified frequency' % proxy.output)
                proxy.cleanup()
                continue
            stdout, stderr, client_rc = voms_proxy_init(proxy)
        elif proxy_section.startswith('PILOT'):
            # for pilots both the proxy lifetime and the VOMS AC lifetime must
            # be within the renewal window before we skip
            if has_time_left(proxy.timeleft()) and has_time_left(proxy.actimeleft()):
                print('Skipping renewal of %s: time remaining within the specified frequency' % proxy.output)
                proxy.cleanup()
                continue

            # map the (case-insensitive) configured VO name to its canonical form
            vo_attr = VO(vo_name_map[proxy_config['vo'].lower()], proxy_config['fqan'])

            if safe_boolcomp(proxy_config['use_voms_server'], True):
                # we specify '-order' because some European CEs care about VOMS AC order
                # The '-order' option chokes if a Capability is specified but we want to make sure we request it
                # in '-voms' because we're not sure if anything is looking for it
                fqan = re.sub(r'\/Capability=\w+$', '', vo_attr.fqan)
                stdout, stderr, client_rc = voms_proxy_init(proxy, '-voms', vo_attr.voms, '-order', fqan)
            else:
                # no VOMS server: fake the AC locally using the VO cert/key pair
                vo_attr.cert = proxy_config['vo_cert']
                vo_attr.key = proxy_config['vo_key']
                try:
                    vo_attr.uri = vo_uri_map[x509Support.extract_DN(vo_attr.cert)]
                except KeyError:
                    retcode = 1
                    print("ERROR: Failed to renew proxy {0}: ".format(proxy.output) +
                          "Could not find entry in {0} for {1}. ".format(vomses, vo_attr.cert) +
                          "Please verify your VO data installation.")
                    proxy.cleanup()
                    continue
                stdout, stderr, client_rc = voms_proxy_fake(proxy, vo_attr)
        else:
            # BUGFIX: report the offending section name (proxy_section), not the
            # Proxy object; the stderr message below already uses proxy_section
            print("WARNING: Unrecognized configuration section %s found in %s.\n" % (proxy_section, CONFIG) +
                  "Valid configuration sections: 'FRONTEND' or 'PILOT'.")
            client_rc = -1
            stderr = "Unrecognized configuration section '%s', renewal not attempted." % proxy_section
            stdout = ""

        if client_rc == 0:
            proxy.write()
            print("Renewed proxy from '%s' to '%s'." % (proxy.cert, proxy.output))
        else:
            retcode = 1
            # don't raise an exception here to continue renewing other proxies
            print("ERROR: Failed to renew proxy %s:\n%s%s" % (proxy.output, stdout, stderr))
            proxy.cleanup()

    return retcode