Example #1
def main():
    try:
        # read these from config file
        section = "test_size_rotate"
        config_file = "../test_configurations/test_logSupport.yaml"
        with open(config_file) as config_fd:
            config = yaml.safe_load(config_fd)

        log_name = str(config[section]["log_name"])
        extension = str(config[section]["extension"])
        msg_types = str(config[section]["msg_types"])
        max_days = float(config[section]["max_days"])
        min_days = float(config[section]["min_days"])
        max_mbytes = float(config[section]["max_mbytes"])
        backupCount = 5
        compression = ''

        log_dir = "/tmp/%s" % log_name

        logSupport.add_processlog_handler(log_name, log_dir, msg_types, 
                    extension, max_days, min_days, max_mbytes, 
                    backupCount=backupCount, compression=compression)

        log = logging.getLogger(log_name)
        log.info("%s\n" % create_random_string(length=2048))

        return 0
    except Exception:
        return 1
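
The test reads its parameters from a YAML file with a test_size_rotate section. A minimal sketch of what such a file could contain, generated with PyYAML (only the key names come from the example above; the file name and all values here are illustrative assumptions):

import yaml

# Hypothetical contents for a test_logSupport.yaml-style file;
# only the key names are taken from the example above, the values are guesses.
sample_config = {
    "test_size_rotate": {
        "log_name": "size_rotate",
        "extension": "log",
        "msg_types": "INFO,DEBUG,ERR",
        "max_days": 1.0,
        "min_days": 0.0,
        "max_mbytes": 0.5,   # small size so rotation triggers quickly in a test
    }
}

with open("test_logSupport.yaml", "w") as f:
    yaml.safe_dump(sample_config, f, default_flow_style=False)
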
Example #2
def main():
    try:
        # read these from config file
        section = "test_size_rotate"
        config_file = "../test_configurations/test_logSupport.yaml"
        with open(config_file) as config_fd:
            config = yaml.safe_load(config_fd)

        log_name = str(config[section]["log_name"])
        extension = str(config[section]["extension"])
        msg_types = str(config[section]["msg_types"])
        max_days = float(config[section]["max_days"])
        min_days = float(config[section]["min_days"])
        max_mbytes = float(config[section]["max_mbytes"])
        backupCount = 5
        compression = ''

        log_dir = "/tmp/%s" % log_name

        logSupport.add_processlog_handler(log_name,
                                          log_dir,
                                          msg_types,
                                          extension,
                                          max_days,
                                          min_days,
                                          max_mbytes,
                                          backupCount=backupCount,
                                          compression=compression)

        log = logging.getLogger(log_name)
        log.info("%s\n" % create_random_string(length=2048))

        return 0
    except Exception:
        return 1
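
create_random_string() is a helper defined elsewhere in the test module and is not shown on this page. A hypothetical stand-in that returns a printable string of the requested length might look like this:

import random
import string

def create_random_string(length=2048):
    # Hypothetical stand-in for the test helper used above:
    # build a printable string of the requested length.
    chars = string.ascii_letters + string.digits
    return ''.join(random.choice(chars) for _ in range(length))
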
Example #3
def main(work_dir):
    startup_time = time.time()

    glideinFrontendConfig.frontendConfig.frontend_descript_file = os.path.join(work_dir, glideinFrontendConfig.frontendConfig.frontend_descript_file)
    frontendDescript = glideinFrontendConfig.FrontendDescript(work_dir)

    # the log dir is shared between the frontend main and the groups, so use a subdir
    logSupport.log_dir = os.path.join(frontendDescript.data['LogDir'], "frontend")

    # Configure frontend process logging
    process_logs = eval(frontendDescript.data['ProcessLogs']) 
    for plog in process_logs:
        logSupport.add_processlog_handler("frontend", logSupport.log_dir, plog['msg_types'], plog['extension'],
                                      int(float(plog['max_days'])),
                                      int(float(plog['min_days'])),
                                      int(float(plog['max_mbytes'])))
    logSupport.log = logging.getLogger("frontend")
    logSupport.log.info("Logging initialized")
    logSupport.log.debug("Frontend startup time: %s" % str(startup_time))

    try:
        cleanup_environ()
        # we use a dedicated config... ignore the system-wide
        os.environ['CONDOR_CONFIG'] = frontendDescript.data['CondorConfig']

        sleep_time = int(frontendDescript.data['LoopDelay'])
        advertize_rate = int(frontendDescript.data['AdvertiseDelay'])
        restart_attempts = int(frontendDescript.data['RestartAttempts'])
        restart_interval = int(frontendDescript.data['RestartInterval'])

    groups = frontendDescript.data['Groups'].split(',')
        groups.sort()

        glideinFrontendMonitorAggregator.monitorAggregatorConfig.config_frontend(os.path.join(work_dir, "monitor"), groups)
    except:
        logSupport.log.exception("Exception occurred configuring monitoring: ")
        raise

    glideinFrontendMonitoring.write_frontend_descript_xml(frontendDescript, os.path.join(work_dir, 'monitor/'))
    
    # create lock file
    pid_obj = glideinFrontendPidLib.FrontendPidSupport(work_dir)

    # start
    pid_obj.register()
    try:
        try:
            spawn(sleep_time, advertize_rate, work_dir,
                  frontendDescript, groups, restart_attempts, restart_interval)
        except KeyboardInterrupt:
            logSupport.log.info("Received signal...exit")
        except:
            logSupport.log.exception("Exception occurred trying to spawn: ")
    finally:
        pid_obj.relinquish()
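
The ProcessLogs entry is stored as the string form of a list of dictionaries, which is why it is evaluated before the loop. A small sketch of such a value, parsed here with ast.literal_eval() instead of eval() (the concrete values are assumptions; only the dictionary keys are taken from the loop above):

import ast

# Hypothetical ProcessLogs value; the keys match those read in the loop above.
process_logs_str = ("[{'msg_types': 'INFO,WARN,ERR', 'extension': 'info', "
                    "'max_days': '7.0', 'min_days': '3.0', 'max_mbytes': '100.0'}]")

# ast.literal_eval() only accepts Python literals, unlike eval()
for plog in ast.literal_eval(process_logs_str):
    print(plog['msg_types'], plog['extension'],
          int(float(plog['max_days'])),
          int(float(plog['min_days'])),
          int(float(plog['max_mbytes'])))
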
Example #4
def main(startup_dir):
    """
    Reads in the configuration file and starts up the factory
    
    @type startup_dir: String 
    @param startup_dir: Path to the glidein submit directory
    """
    
    startup_time=time.time()

    # force integrity checks on all the operations
    # I need integrity checks also on reads, as I depend on them
    os.environ['_CONDOR_SEC_DEFAULT_INTEGRITY'] = 'REQUIRED'
    os.environ['_CONDOR_SEC_CLIENT_INTEGRITY'] = 'REQUIRED'
    os.environ['_CONDOR_SEC_READ_INTEGRITY'] = 'REQUIRED'
    os.environ['_CONDOR_SEC_WRITE_INTEGRITY'] = 'REQUIRED'

    glideFactoryInterface.factoryConfig.lock_dir = os.path.join(startup_dir,"lock")

    glideFactoryConfig.factoryConfig.glidein_descript_file = os.path.join(startup_dir, glideFactoryConfig.factoryConfig.glidein_descript_file)
    glideinDescript = glideFactoryConfig.GlideinDescript()
    frontendDescript = glideFactoryConfig.FrontendDescript()

    # Setup the glideFactoryLib.factoryConfig so that we can process the globals classads
    glideFactoryLib.factoryConfig.config_whoamI(glideinDescript.data['FactoryName'], glideinDescript.data['GlideinName'])
    glideFactoryLib.factoryConfig.config_dirs(startup_dir, glideinDescript.data['LogDir'],
                                              glideinDescript.data['ClientLogBaseDir'],
                                              glideinDescript.data['ClientProxiesBaseDir'])

    write_descript(glideinDescript, frontendDescript, os.path.join(startup_dir, 'monitor/'))

    # Set the Log directory
    logSupport.log_dir = os.path.join(glideinDescript.data['LogDir'], "factory")
   
    # Configure factory process logging
    process_logs = eval(glideinDescript.data['ProcessLogs']) 
    for plog in process_logs:
        logSupport.add_processlog_handler("factory", logSupport.log_dir, plog['msg_types'], plog['extension'],
                                      int(float(plog['max_days'])),
                                      int(float(plog['min_days'])),
                                      int(float(plog['max_mbytes'])))
    logSupport.log = logging.getLogger("factory")
    logSupport.log.info("Logging initialized")
    
    try:
        os.chdir(startup_dir)
    except:
        logSupport.log.exception("Unable to change to startup_dir: ")
        raise

    try:        
        if (is_file_old(glideinDescript.default_rsakey_fname, 
                        int(glideinDescript.data['OldPubKeyGraceTime']))):
            # First back up and load any existing key
            logSupport.log.info("Backing up and loading old key")
            glideinDescript.backup_and_load_old_key()
            # Create a new key for this run
            logSupport.log.info("Recreating and loading new key")
            glideinDescript.load_pub_key(recreate=True)
        else:
            # Key is recent enough. Just reuse it.
            logSupport.log.info("Key is recent enough, reusing for this run")
            glideinDescript.load_pub_key(recreate=False)
            logSupport.log.info("Loading old key")
            glideinDescript.load_old_rsa_key()
    except:
        logSupport.log.exception("Exception occurred loading factory keys: ")
        raise 
        
    glideFactoryMonitorAggregator.glideFactoryMonitoring.monitoringConfig.my_name = "%s@%s" % (glideinDescript.data['GlideinName'], glideinDescript.data['FactoryName'])

    # check that the GSI environment is properly set
    if 'X509_CERT_DIR' not in os.environ:
        if os.path.isdir('/etc/grid-security/certificates'):
            os.environ['X509_CERT_DIR'] = '/etc/grid-security/certificates'
            logSupport.log.info("Environment variable X509_CERT_DIR not set, defaulting to /etc/grid-security/certificates")
        else:
            logSupport.log.error("Environment variable X509_CERT_DIR not set and /etc/grid-security/certificates does not exist.")
            raise RuntimeError("Need X509_CERT_DIR to work!")

    glideFactoryInterface.factoryConfig.advertise_use_tcp = (glideinDescript.data['AdvertiseWithTCP'] in ('True', '1'))
    glideFactoryInterface.factoryConfig.advertise_use_multi = (glideinDescript.data['AdvertiseWithMultiple'] in ('True', '1'))
    sleep_time = int(glideinDescript.data['LoopDelay'])
    advertize_rate = int(glideinDescript.data['AdvertiseDelay'])
    restart_attempts = int(glideinDescript.data['RestartAttempts'])
    restart_interval = int(glideinDescript.data['RestartInterval'])

    entries = glideinDescript.data['Entries'].split(',')
    entries.sort()

    glideFactoryMonitorAggregator.monitorAggregatorConfig.config_factory(os.path.join(startup_dir, "monitor"), entries)

    # create lock file
    pid_obj = glideFactoryPidLib.FactoryPidSupport(startup_dir)

    # start
    pid_obj.register()
    try:
        try:
            spawn(sleep_time, advertize_rate, startup_dir,
                  glideinDescript, frontendDescript, entries, restart_attempts, restart_interval)
        except KeyboardInterrupt:
            raise
        except:
            logSupport.log.exception("Exception occurred spawning the factory: "  )
def main(parent_pid, work_dir, group_name):
    startup_time = time.time()

    elementDescript = glideinFrontendConfig.ElementMergedDescript(work_dir, group_name)

    # the log dir is shared between the frontend main and the groups, so use a subdir
    logSupport.log_dir = os.path.join(elementDescript.frontend_data['LogDir'], "group_%s" % group_name)
    
    # Configure frontend group process logging
    process_logs = eval(elementDescript.frontend_data['ProcessLogs']) 
    for plog in process_logs:
        logSupport.add_processlog_handler(group_name, logSupport.log_dir, plog['msg_types'], plog['extension'],
                                      int(float(plog['max_days'])),
                                      int(float(plog['min_days'])),
                                      int(float(plog['max_mbytes'])))
    logSupport.log = logging.getLogger(group_name)
    logSupport.log.info("Logging initialized")
    logSupport.log.debug("Frontend Element startup time: %s" % str(startup_time))

    paramsDescript = glideinFrontendConfig.ParamsDescript(work_dir, group_name)
    attrsDescript = glideinFrontendConfig.AttrsDescript(work_dir,group_name)
    signatureDescript = glideinFrontendConfig.GroupSignatureDescript(work_dir, group_name)
    #
    # We decided we will not use the data from the stage area
    # Leaving it commented in the code, in case we decide in the future
    #  it was a good validation of the Web server health
    #
    #stageArea=glideinFrontendConfig.MergeStageFiles(elementDescript.frontend_data['WebURL'],
    #                                                signatureDescript.signature_type,
    #                                                signatureDescript.frontend_descript_fname,signatureDescript.frontend_descript_signature,
    #                                                group_name,
    #                                                signatureDescript.group_descript_fname,signatureDescript.group_descript_signature)
    # constsDescript=stageArea.get_constants()
    #

    attr_dict=attrsDescript.data

    glideinFrontendMonitoring.monitoringConfig.monitor_dir = os.path.join(work_dir, "monitor/group_%s" % group_name)

    glideinFrontendInterface.frontendConfig.advertise_use_tcp = (elementDescript.frontend_data['AdvertiseWithTCP'] in ('True', '1'))
    glideinFrontendInterface.frontendConfig.advertise_use_multi = (elementDescript.frontend_data['AdvertiseWithMultiple'] in ('True', '1'))

    try:
        glideinwms_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
        glideinFrontendInterface.frontendConfig.glideinwms_version = glideinWMSVersion.GlideinWMSDistro(glideinwms_dir, os.path.join(glideinwms_dir, 'etc/checksum.frontend')).version()
    except:
        logSupport.log.exception("Exception occurred while trying to retrieve the glideinwms version: ")

    if len(elementDescript.merged_data['Proxies']) > 0:
        if elementDescript.merged_data['ProxySelectionPlugin'] not in glideinFrontendPlugins.proxy_plugins:
            logSupport.log.warning("Invalid ProxySelectionPlugin '%s', supported plugins are %s" % (elementDescript.merged_data['ProxySelectionPlugin'], glideinFrontendPlugins.proxy_plugins.keys()))
            return 1
        x509_proxy_plugin = glideinFrontendPlugins.proxy_plugins[elementDescript.merged_data['ProxySelectionPlugin']](os.path.join(work_dir, "group_%s" % group_name), glideinFrontendPlugins.createCredentialList(elementDescript))
    else:
        # no proxies, will try to use the factory one
        x509_proxy_plugin = None

    # set the condor configuration and GSI setup globally, so I don't need to worry about it later on
    os.environ['CONDOR_CONFIG'] = elementDescript.frontend_data['CondorConfig']
    os.environ['_CONDOR_CERTIFICATE_MAPFILE'] = elementDescript.element_data['MapFile']
    os.environ['X509_USER_PROXY'] = elementDescript.frontend_data['ClassAdProxy']

    # create lock file
    pid_obj = glideinFrontendPidLib.ElementPidSupport(work_dir, group_name)

    pid_obj.register(parent_pid)
    try:
        try:
            logSupport.log.info("Starting up")
            iterate(parent_pid, elementDescript, paramsDescript, attr_dict, signatureDescript, x509_proxy_plugin)
        except KeyboardInterrupt:
            logSupport.log.info("Received signal...exit")
        except:
            logSupport.log.exception("Unhandled exception, dying: ")
    finally:
        pid_obj.relinquish()
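
For comparison, the size-based part of the rotation that add_processlog_handler sets up in these examples can be approximated with the standard library's RotatingFileHandler. This is only a rough sketch, not the glideinWMS implementation; the directory, file name, size limit and format string below are illustrative assumptions:

import logging
import logging.handlers
import os

log_dir = "/tmp/frontend_logs"          # illustrative path
os.makedirs(log_dir, exist_ok=True)

log = logging.getLogger("frontend")
log.setLevel(logging.DEBUG)

# Rotate once the file exceeds ~100 MB, keeping 5 backups,
# loosely mirroring the max_mbytes/backupCount arguments above.
handler = logging.handlers.RotatingFileHandler(
    os.path.join(log_dir, "frontend.info.log"),
    maxBytes=100 * 1024 * 1024,
    backupCount=5)
handler.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s"))
log.addHandler(handler)

log.info("Logging initialized")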