def use_defaults(self, defaults):
    """Recursively fill in any keys missing from self.data using a defaults tree.

    ``defaults`` is an xmlParse.OrderedDict whose values are either:
      * another xmlParse.OrderedDict (a nested subsection), or
      * a leaf 4-tuple ``(defvalue, ktype, txt, subdef)`` where ``defvalue``
        itself may be an OrderedDict (dictionary-of-elements default), a list
        (list-of-elements default) or a simple value.

    Existing entries in self.data are never overwritten; only missing keys are
    created, and defaults are pushed recursively into already-present
    sub-elements.  Simple values are deep-copied so entries do not alias the
    shared defaults structure.

    NOTE(review): ``self[k]`` is assumed to return a wrapper object exposing
    ``use_defaults`` (not the raw dict in ``self.data[k]``) — defined
    elsewhere in this class; confirm against the __getitem__ implementation.
    """
    for k in defaults.keys():
        defel = defaults[k]
        if isinstance(defel, xmlParse.OrderedDict):
            # subdictionary
            if k not in self.data:
                self.data[k] = xmlParse.OrderedDict()  # first create empty, if does not exist
            # then, set defaults on all elements of subdictionary
            self[k].use_defaults(defel)
        else:
            # final element: unpack the (value, type, description, sub-defaults) leaf
            defvalue, ktype, txt, subdef = defel
            if isinstance(defvalue, xmlParse.OrderedDict):
                # dictionary el elements
                if k not in self.data:
                    self.data[k] = xmlParse.OrderedDict()  # no elements yet, set and empty dictionary
                else:
                    # need to set defaults on all elements in the dictionary
                    data_el = self[k]
                    for data_subkey in data_el.keys():
                        data_el[data_subkey].use_defaults(subdef)
            elif isinstance(defvalue, list):
                # list of elements
                if k not in self.data:
                    self.data[k] = []  # no elements yet, set and empty list
                else:
                    # need to set defaults on all elements in the list
                    for data_el in self[k]:
                        data_el.use_defaults(subdef)
            else:
                # a simple value; deep-copy so self.data never aliases the defaults
                if k not in self.data:
                    self.data[k] = copy.deepcopy(defvalue)
def __init__(self, usage_prefix, src_dir, argv): self.usage_prefix=usage_prefix # support dir self.src_dir=src_dir # initialize the defaults self.defaults=xmlParse.OrderedDict() self.init_defaults() try: if len(argv)<2: raise RuntimeError("Missing config file") if argv[1]=="-help": raise RuntimeError("\nA config file will contain:\n%s\n\nThe config file will be in XML format."%self.get_description(" ")) self.cfg_name=os.path.abspath(argv[1]) self.load_file(self.cfg_name) self.subparams.validate(self.defaults, self.get_top_element()) # make a copy of the loaded data, so that I can always tell what was derived and what was not self.org_data=copy.deepcopy(self.data) self.subparams.use_defaults(self.defaults) # create derived values self.derive() except RuntimeError as e: raise RuntimeError("Unexpected error occurred loading the configuration file.\n\n%s" % e) pass
def loadMatchAttrs(self):
    """Load factory_match_attrs/job_match_attrs from the policy pyObject.

    For each of the two optional attributes present on ``self.pyObject``,
    convert the dict-of-dicts into xmlParse.OrderedDict form.

    Returns:
        dict: {'factory_match_attrs': ..., 'job_match_attrs': ...}; each
              value is an xmlParse.OrderedDict when the attribute exists,
              otherwise an empty dict.

    Raises:
        MatchPolicyContentError: if a present match_attr is not a dict.
    """
    match_attrs = {'factory_match_attrs': {}, 'job_match_attrs': {}}
    for ma_name in ('factory_match_attrs', 'job_match_attrs'):
        # hasattr() is the idiomatic (and cheaper) form of "name in dir(obj)"
        if hasattr(self.pyObject, ma_name):
            ma_attr = getattr(self.pyObject, ma_name)
            # Check if the match_attr is of dict type
            # TODO: Also need to check that match_attr is of string/int/bool
            if isinstance(ma_attr, dict):
                data = xmlParse.OrderedDict()
                # .items() iterates the same pairs as the Py2-only .iteritems()
                for k, v in ma_attr.items():
                    data[k] = xmlParse.OrderedDict(v)
                match_attrs[ma_name] = data
            else:
                # Raise error if match_attr is not of type dict
                raise MatchPolicyContentError(self.file, ma_name, type(ma_attr).__name__, 'dict')
    return match_attrs
def get_xml(self):
    """Return the XML string representation of self.data.

    xmlFormat's behavior is driven by module-level globals, so they are
    temporarily overridden for this conversion and then restored.

    Returns:
        str: XML produced by xmlFormat.class2string for the top element.
    """
    old_default_ignore_nones = xmlFormat.DEFAULT_IGNORE_NONES
    old_default_lists_params = xmlFormat.DEFAULT_LISTS_PARAMS
    old_default_dicts_params = xmlFormat.DEFAULT_DICTS_PARAMS
    try:
        xmlFormat.DEFAULT_IGNORE_NONES = True
        # these are used internally, do not need to be ordered
        xml_format = self.get_xml_format()
        xmlFormat.DEFAULT_LISTS_PARAMS = xml_format['lists_params']
        xmlFormat.DEFAULT_DICTS_PARAMS = xml_format['dicts_params']
        # hack needed to make xmlFormat to properly do the formating, using override_dictionary_type
        dict_override = type(xmlParse.OrderedDict())
        out = xmlFormat.class2string(self.data, self.get_top_element(), override_dictionary_type=dict_override)
    finally:
        # BUGFIX: restore the module globals even if class2string raises,
        # otherwise the overridden formatting settings leak to later callers.
        xmlFormat.DEFAULT_IGNORE_NONES = old_default_ignore_nones
        xmlFormat.DEFAULT_LISTS_PARAMS = old_default_lists_params
        xmlFormat.DEFAULT_DICTS_PARAMS = old_default_dicts_params
    return out
def init_defaults(self):
    """Populate self.defaults with all VO frontend configuration defaults.

    Builds the nested default tables (group config, match policies,
    credentials/security, staging, monitoring, HA) used to validate and
    complete the frontend XML configuration.  Each leaf entry is a 4-item
    tuple/list: (default value, type name, description, sub-defaults).
    """
    self.init_support_defaults()

    # VO scripts should start after the factory has been set completely
    # but there could be exceptions

    # Files/Validation/Custom scripts settings for frontend
    self.file_defaults["after_entry"] = (
        "True", 'Bool',
        'Should this file be loaded after the factory entry ones?', None)

    # Publishing attr specific to frontend
    self.attr_defaults["type"] = [
        "string", "string|int|expr",
        "What kind on data is value. (if expr, a python expression with access to frontend and glidein dictionaries)",
        None
    ]

    # Config section exclusive to frontend group
    group_config_defaults = cWParams.commentedOrderedDict()

    group_config_running_defaults = cWParams.commentedOrderedDict()
    group_config_running_defaults["max"] = [
        '10000', "nr_jobs",
        "What is the max number of running glideins I want to get to", None
    ]
    group_config_running_defaults["min"] = [
        '0', "nr_jobs",
        "Min number of running glideins with an empty/small queue.", None
    ]
    group_config_running_defaults["relative_to_queue"] = [
        '1.15', "fraction",
        "Max relative to number of matching jobs in the queue.", None
    ]
    group_config_defaults[
        'running_glideins_per_entry'] = group_config_running_defaults

    # This is a string because, we want to distinguish a value from missing (""), only a value overrides the corresponding default or global setting
    group_config_defaults['ignore_down_entries'] = [
        "", "String",
        "If set to True or False the group setting will override the global value (or its default, False)."
        " When True the frontend will ignore down entries during matching counts",
        None
    ]

    common_config_running_total_defaults = cWParams.commentedOrderedDict()
    common_config_running_total_defaults["max"] = [
        '100000', "nr_jobs",
        "What is the max number of running glideins I want to get to - globally",
        None
    ]
    common_config_running_total_defaults["curb"] = [
        '90000', "nr_jobs", "When should I start curbing glidein submission",
        None
    ]
    group_config_defaults[
        'running_glideins_total'] = common_config_running_total_defaults

    group_config_idle_defaults = cWParams.commentedOrderedDict()
    group_config_idle_defaults["max"] = [
        '100', "nr_jobs",
        "How much pressure should I apply to the entry points", None
    ]
    group_config_idle_defaults["reserve"] = [
        '5', "nr_jobs", "How much to overcommit.", None
    ]
    group_config_defaults[
        'idle_glideins_per_entry'] = group_config_idle_defaults

    group_config_lifetime_defaults = cWParams.commentedOrderedDict()
    group_config_lifetime_defaults["max"] = [
        '0', "NR",
        "How long idle glideins are kept in the factory queue (seconds)", None
    ]
    group_config_defaults[
        'idle_glideins_lifetime'] = group_config_lifetime_defaults

    group_config_vms_defaults = cWParams.commentedOrderedDict()
    group_config_vms_defaults["max"] = [
        '100', "nr_vms",
        "How many idle VMs should I tollerate, before stopping submitting glideins",
        None
    ]
    group_config_vms_defaults["curb"] = [
        '5', "nr_vms",
        "How many idle VMs should I tollerate, before starting to curb submissions.",
        None
    ]
    group_config_defaults['idle_vms_per_entry'] = group_config_vms_defaults

    # Global config section
    common_config_vms_total_defaults = cWParams.commentedOrderedDict()
    common_config_vms_total_defaults["max"] = [
        '1000', "nr_jobs",
        "How many total idle VMs should I tollerate, before stopping submitting glideins",
        None
    ]
    common_config_vms_total_defaults["curb"] = [
        '200', "nr_jobs",
        "How many total idle VMs should I tollerate, before starting to curb submissions.",
        None
    ]
    group_config_defaults[
        'idle_vms_total'] = common_config_vms_total_defaults

    group_config_proc_work_defaults = cWParams.commentedOrderedDict()
    group_config_proc_work_defaults["matchmakers"] = [
        '3', "NR",
        "Max number of worker processes that will be doing the matchmaking",
        None
    ]
    group_config_defaults[
        'processing_workers'] = group_config_proc_work_defaults

    group_config_removal_defaults = cWParams.commentedOrderedDict()
    group_config_removal_defaults["type"] = [
        'NO', "ALL|IDLE|WAIT|NO", "Trigger the removal of these glideins",
        None
    ]
    group_config_removal_defaults["wait"] = [
        '0', "NR",
        "Time without requests to wait before triggering the removal (cycles)",
        None
    ]
    group_config_removal_defaults["requests_tracking"] = [
        'False', "Bool",
        "Remove glideins as soon as the requests are less than the available glideins (instead of 0)",
        None
    ]
    group_config_removal_defaults["margin"] = [
        '0', "NR", "How closely to follow the number of requests", None
    ]
    group_config_defaults[
        'glideins_removal'] = group_config_removal_defaults

    # not exported and order does not matter, can stay a regular dictionary
    sub_defaults = {
        'attrs': (xmlParse.OrderedDict(), 'Dictionary of attributes',
                  "Each attribute group contains", self.attr_defaults),
        'files': ([], 'List of files', "Each file group contains",
                  self.file_defaults)
    }

    # User Pool collectors
    collector_defaults = cWParams.commentedOrderedDict()
    collector_defaults["node"] = (
        None, "nodename",
        "Factory collector node name (for example, fg2.my.org:9999)", None)
    collector_defaults["DN"] = (
        None, "dn",
        "Factory collector distinguised name (subject) (for example, /DC=org/DC=myca/OU=Services/CN=fg2.my.org)",
        None)
    collector_defaults["factory_identity"] = (
        "*****@*****.**", "authenticated_identity",
        "What is the AuthenticatedIdentity of the factory at the WMS collector",
        None)
    collector_defaults["my_identity"] = (
        "*****@*****.**", "authenticated_identity",
        "What is the AuthenticatedIdentity of my proxy at the WMS collector",
        None)

    # User schedulers
    schedd_defaults = cWParams.commentedOrderedDict()
    schedd_defaults["fullname"] = (
        None, "name", "User schedd name (for example, [email protected])",
        None)
    schedd_defaults["DN"] = (
        None, "dn",
        "User schedd distinguised name (subject) (for example, /DC=org/DC=myca/OU=Services/CN=sb1.my.org)",
        None)

    # match_attr for factory and job query_expr
    query_attrs_defaults = cWParams.commentedOrderedDict()
    query_attrs_defaults['type'] = ('string', 'string|int|real|bool',
                                    'Attribute type', None)

    # Factory and job query_expr
    fj_match_defaults = cWParams.commentedOrderedDict()
    fj_match_defaults["query_expr"] = [
        'True', 'CondorExpr', 'Expression for selecting user jobs', None
    ]
    fj_match_defaults["match_attrs"] = (xmlParse.OrderedDict(),
                                        "Dictionary of ClassAd attributes",
                                        "Each attribute contains",
                                        query_attrs_defaults)

    # Factory match settings (deep-copied so the shared base is not mutated)
    factory_match_defaults = copy.deepcopy(fj_match_defaults)
    factory_match_defaults["collectors"] = ([], "List of factory collectors",
                                            "Each collector contains",
                                            collector_defaults)

    # Job match settings
    job_match_defaults = copy.deepcopy(fj_match_defaults)
    job_match_defaults["schedds"] = ([], "List of user schedds",
                                     "Each schedd contains", schedd_defaults)

    # Match section. Aka VO policies.
    match_defaults = cWParams.commentedOrderedDict()
    match_defaults["factory"] = factory_match_defaults
    match_defaults["job"] = job_match_defaults
    match_defaults["match_expr"] = (
        'True', 'PythonExpr',
        'Python expression for matching jobs to factory entries with access to job and glidein dictionaries',
        None)
    match_defaults["start_expr"] = (
        'True', 'CondorExpr',
        'Condor expression for matching jobs to glideins at runtime', None)
    match_defaults["policy_file"] = (
        None, 'PolicyFile',
        'External policy file where match_expr, query_expr, start_expr and match_attr are defined',
        None)

    # Credential settings
    proxy_defaults = cWParams.commentedOrderedDict()
    proxy_defaults["absfname"] = (
        None, "fname", "x509 proxy file name (see also pool_idx_list)", None)
    proxy_defaults["keyabsfname"] = (
        None, "fname", "for key files, file name of the key pair", None)
    proxy_defaults["pilotabsfname"] = (
        None, "fname",
        "to specify a different pilot proxy instead of using submit proxy",
        None)
    proxy_defaults["type"] = (
        "grid_proxy", "credential type",
        "Type of credential: grid_proxy,cert_pair,key_pair,username_password,auth_file",
        None)
    proxy_defaults["trust_domain"] = ("OSG", "grid_type", "Trust Domain",
                                      None)
    proxy_defaults["creation_script"] = (None, "command",
                                         "Script to re-create credential",
                                         None)
    proxy_defaults["update_frequency"] = (
        None, "int", "Update proxy when there is this much time left", None)
    proxy_defaults["remote_username"] = (
        None, "username", "User name at the remote resource", None)
    proxy_defaults["vm_id"] = (None, "vm_id", "VM Id", None)
    proxy_defaults["vm_type"] = (None, "vm_type", "VM Type", None)
    proxy_defaults["pool_idx_len"] = (
        None, "boolean",
        "Adds leading zeros to the suffix so all filenames the same length",
        None)
    proxy_defaults["pool_idx_list"] = (
        None, "string", "List of indices, can include ranges of indices",
        None)
    proxy_defaults["security_class"] = (
        None, "id",
        "Proxies in the same security class can potentially access each other (Default: proxy_nr)",
        None)
    proxy_defaults["vm_id_fname"] = (None, "fname",
                                     "to specify a vm id without reconfig",
                                     None)
    proxy_defaults["vm_type_fname"] = (
        None, "fname", "to specify a vm type without reconfig", None)
    proxy_defaults["project_id"] = (None, "string",
                                    "OSG Project ID. Ex TG-12345", None)

    security_defaults = cWParams.commentedOrderedDict()
    security_defaults["proxy_selection_plugin"] = (
        None, "proxy_name",
        "Which credentials selection plugin should I use (ProxyAll if None)",
        None)
    security_defaults["credentials"] = ([], 'List of credentials',
                                        "Each credential element contains",
                                        proxy_defaults)
    security_defaults["security_name"] = (
        None, "frontend_name",
        "What name will we advertize for security purposes?", None)

    # Per-group defaults (also reused below for the global sections)
    self.group_defaults = cWParams.commentedOrderedDict()
    self.group_defaults["match"] = match_defaults
    self.group_defaults["enabled"] = ("True", "Bool",
                                      "Is this group enabled?", None)
    self.group_defaults["config"] = group_config_defaults
    self.group_defaults["attrs"] = sub_defaults['attrs']
    self.group_defaults["files"] = sub_defaults['files']
    self.group_defaults["security"] = copy.deepcopy(security_defaults)

    ###############################
    # Start defining the defaults
    self.defaults["downtimes_file"] = ('frontenddowntime', 'string',
                                       'Frontend Downtime File', None)
    self.defaults["frontend_name"] = (socket.gethostname(), 'ID',
                                      'VO Frontend name', None)
    self.defaults['frontend_versioning'] = (
        'True', 'Bool',
        'Should we create versioned subdirectories of the type frontend_$frontend_name?',
        None)
    self.defaults['frontend_monitor_index_page'] = (
        'True', 'Bool',
        'Should we create an index.html in the monitoring web directory?',
        None)

    work_defaults = cWParams.commentedOrderedDict()
    work_defaults["base_dir"] = ("%s/frontstage" % os.environ["HOME"],
                                 "base_dir", "Frontend base dir", None)
    work_defaults["base_log_dir"] = ("%s/frontlogs" % os.environ["HOME"],
                                     "log_dir", "Frontend base log dir",
                                     None)
    self.defaults["work"] = work_defaults

    process_log_defaults = cWParams.commentedOrderedDict()
    process_log_defaults["min_days"] = [
        "3.0", "days",
        "Min number of days the logs must be preserved (even if they use too much space)",
        None
    ]
    process_log_defaults["max_days"] = [
        "7.0", "days", "Max number of days the logs should be preserved",
        None
    ]
    process_log_defaults["max_mbytes"] = [
        "100.0", "Mbytes", "Max number of Mbytes the logs can use", None
    ]
    process_log_defaults['extension'] = [
        "all", "string", "name of the log extention", None
    ]
    process_log_defaults['msg_types'] = [
        "INFO, WARN, ERR", "string", "types of log messages", None
    ]
    process_log_defaults['backup_count'] = [
        "5", "string", "Number of backup logs to keep", None
    ]
    process_log_defaults['compression'] = [
        "", "string", "Compression for backup log files", None
    ]

    log_retention_defaults = cWParams.commentedOrderedDict()
    log_retention_defaults["process_logs"] = (
        [], 'Dictionary of log types', "Each log corresponds to a log file",
        copy.deepcopy(process_log_defaults))
    self.defaults["log_retention"] = log_retention_defaults

    monitor_footer_defaults = cWParams.commentedOrderedDict()
    monitor_footer_defaults["display_txt"] = [
        "", "string",
        "what will be displayed at the bottom of the monitoring page", None
    ]
    monitor_footer_defaults["href_link"] = [
        "", "string", "where to link to", None
    ]
    self.defaults["monitor_footer"] = monitor_footer_defaults

    self.defaults['loop_delay'] = ('60', 'seconds',
                                   'Number of seconds between iterations',
                                   None)
    self.defaults['advertise_delay'] = ('5', 'NR',
                                        'Advertize evert NR loops', None)
    self.defaults['advertise_with_tcp'] = (
        'True', 'Bool', 'Should condor_advertise use TCP connections?', None)
    self.defaults['advertise_with_multiple'] = (
        'True', 'Bool', 'Should condor_advertise use -multiple?', None)
    self.defaults['group_parallel_workers'] = (
        '2', 'NR',
        'Max number of parallel workers that process the group policies',
        None)
    self.defaults['restart_attempts'] = (
        '3', 'NR',
        'Max allowed NR restarts every restart_interval before shutting down',
        None)
    self.defaults['restart_interval'] = (
        '1800', 'NR',
        'Time interval NR sec which allow max restart attempts', None)

    stage_defaults = cWParams.commentedOrderedDict()
    stage_defaults["base_dir"] = ("/var/www/html/vofrontend/stage",
                                  "base_dir", "Stage base dir", None)
    stage_defaults["web_base_url"] = ("http://%s/vofrontend/stage" %
                                      socket.gethostname(), 'base_url',
                                      'Base Web server URL', None)
    stage_defaults["use_symlink"] = (
        "True", "Bool", "Can I symlink stage dir from work dir?", None)
    self.defaults["stage"] = stage_defaults

    self.monitor_defaults["base_dir"] = (
        "/var/www/html/vofrontend/monitor", "base_dir",
        "Monitoring base dir", None)
    self.monitor_defaults["web_base_url"] = (None, "web_base_url",
                                             "Monitoring base dir", None)
    self.defaults["monitor"] = self.monitor_defaults

    pool_collector_defaults = cWParams.commentedOrderedDict()
    pool_collector_defaults["node"] = (
        None, "nodename",
        "Pool collector node name (for example, col1.my.org:9999)", None)
    pool_collector_defaults["DN"] = (
        None, "dn",
        "Pool collector distinguised name (subject) (for example, /DC=org/DC=myca/OU=Services/CN=col1.my.org)",
        None)
    pool_collector_defaults["secondary"] = (
        "False", "Bool",
        "Secondary nodes will be used by glideins, if present", None)
    pool_collector_defaults["group"] = (
        "default", "string",
        "Collector group name useful to group HA setup", None)
    self.defaults["collectors"] = ([], 'List of pool collectors',
                                   "Each proxy collector contains",
                                   pool_collector_defaults)

    ccb_defaults = cWParams.commentedOrderedDict()
    ccb_defaults["node"] = (
        None, "nodename",
        "CCB collector node name (for example, ccb1.my.org:9999)", None)
    ccb_defaults["DN"] = (
        None, "dn",
        "CCB collector distinguised name (subject) (for example, /DC=org/DC=myca/OU=Services/CN=ccb1.my.org)",
        None)
    ccb_defaults["group"] = (
        "default", "string",
        "CCB collector group name useful to group HA setup", None)
    self.defaults["ccbs"] = ([], 'List of CCB collectors',
                             "Each CCB contains", ccb_defaults)

    self.defaults["security"] = copy.deepcopy(security_defaults)
    self.defaults["security"]["classad_proxy"] = (
        None, "fname",
        "File name of the proxy used for talking to the WMS collector", None)
    self.defaults["security"]["proxy_DN"] = (
        None, "dn",
        "Distinguised name (subject) of the proxy (for example, /DC=org/DC=myca/OU=Services/CN=fe1.my.org)",
        None)
    self.defaults["security"]["sym_key"] = (
        "aes_256_cbc", "sym_algo",
        "Type of symetric key system used for secure message passing", None)

    self.defaults["match"] = copy.deepcopy(match_defaults)
    # Change default match value
    # By default we want to look only for vanilla universe jobs
    # that are not monitoring jobs
    self.defaults["match"]["job"]["query_expr"][
        0] = '(JobUniverse==5)&&(GLIDEIN_Is_Monitor =!= TRUE)&&(JOB_Is_Monitor =!= TRUE)'

    self.defaults["attrs"] = sub_defaults['attrs']
    self.defaults["files"] = copy.deepcopy(sub_defaults['files'])
    # ordering is specific to global section of factory
    self.defaults["files"][3]["after_group"] = (
        "False", 'Bool',
        'Should this file be loaded after the group ones?', None)

    global_config_defaults = cWParams.commentedOrderedDict()
    global_config_defaults['ignore_down_entries'] = [
        "False", "Bool",
        "If set the frontend will ignore down entries during matching counts",
        None
    ]
    global_config_defaults['idle_vms_total'] = copy.deepcopy(
        common_config_vms_total_defaults)
    global_config_defaults['idle_vms_total_global'] = copy.deepcopy(
        common_config_vms_total_defaults)
    global_config_defaults['running_glideins_total'] = copy.deepcopy(
        common_config_running_total_defaults)
    global_config_defaults[
        'running_glideins_total_global'] = copy.deepcopy(
            common_config_running_total_defaults)
    self.defaults["config"] = global_config_defaults

    self.defaults["groups"] = (xmlParse.OrderedDict(),
                               "Dictionary of groups",
                               "Each group contains", self.group_defaults)

    # Initialize the external policy modules data structure
    self.match_policy_modules = {
        'frontend': None,
        'groups': {},
    }

    # High Availability Configuration settings
    haf_defaults = cWParams.commentedOrderedDict()
    haf_defaults['frontend_name'] = (None, 'frontend_name',
                                     'Name of the frontend', None)

    ha_defaults = cWParams.commentedOrderedDict()
    ha_defaults['ha_frontends'] = ([], 'List of frontends in HA mode',
                                   'Each element contains', haf_defaults)
    ha_defaults["enabled"] = ('False', 'Bool', 'Enable HA?', None)
    ha_defaults["check_interval"] = (
        '300', 'NR',
        'How frequently should slav check if the master is down', None)
    #ha_defaults["activation_delay"]=('150', 'NR', 'How many sec to wait before slav activates after detecting that master is down', None)
    self.defaults['high_availability'] = ha_defaults
    return
def init_defaults(self):
    """
    Populates the defaults for all the factory configuration values.

    Builds the nested default tables (per-entry config, frontend whitelists,
    submit/log/stage/monitor settings, security, condor tarballs) used to
    validate and complete the factory XML configuration.  Each leaf entry is
    a 4-item tuple/list: (default value, type name, description, sub-defaults).
    """
    self.init_support_defaults()

    # Defaults for allowing frontends in a whitelist
    # in the factory config (per entry point)
    self.allow_defaults = cWParams.commentedOrderedDict()
    self.allow_defaults["name"] = (None, "string", "frontend name", None)
    self.allow_defaults["security_class"] = ("All", "string",
                                             "security class", None)

    # publishing specific to factory
    self.attr_defaults["publish"] = (
        "True", "Bool", "Should it be published by the factory?", None)
    self.attr_defaults["const"] = (
        "True", "Bool",
        "Should it be constant? (Else it can be overriden by the frontend. Used only if parameter is True.)",
        None)

    self.infosys_defaults = cWParams.commentedOrderedDict()
    self.infosys_defaults["type"] = (None, "RESS|BDII",
                                     "Type of information system", None)
    self.infosys_defaults["server"] = (None, "host",
                                       "Location of the infosys server",
                                       None)
    self.infosys_defaults["ref"] = (
        None, "id", "Referenced for the entry point in the infosys", None)

    self.mongroup_defaults = cWParams.commentedOrderedDict()
    self.mongroup_defaults["group_name"] = (None, "groupname",
                                            "Name of the monitoring group",
                                            None)

    entry_config_defaults = cWParams.commentedOrderedDict()

    # Limits on glidein jobs, per entry and per frontend
    entry_config_max_jobs_defaults = cWParams.commentedOrderedDict()
    max_jobs_per_entry_defaults = cWParams.commentedOrderedDict()
    max_jobs_per_entry_defaults["glideins"] = (
        '10000', "nr",
        "Maximum number of concurrent glideins (per entry) that can be submitted.",
        None)
    max_jobs_per_entry_defaults["idle"] = (
        '2000', "nr",
        "Maximum number of idle glideins (per entry) allowed.", None)
    max_jobs_per_entry_defaults["held"] = (
        '1000', "nr",
        "Maximum number of held glideins (per entry) before forcing the cleanup.",
        None)
    entry_config_max_jobs_defaults[
        'per_entry'] = max_jobs_per_entry_defaults

    max_jobs_default_per_frontend_defaults = cWParams.commentedOrderedDict(
    )
    max_jobs_default_per_frontend_defaults["glideins"] = (
        '5000', "nr",
        "Maximum number of concurrent glideins (default per frontend) that can be submitted.",
        None)
    max_jobs_default_per_frontend_defaults["idle"] = (
        '100', "nr",
        "Maximum number of idle glideins (default per frontend) allowed.",
        None)
    max_jobs_default_per_frontend_defaults["held"] = (
        '50', "nr",
        "Maximum number of held glideins (default per frontend) before forcing the cleanup.",
        None)
    entry_config_max_jobs_defaults[
        'default_per_frontend'] = max_jobs_default_per_frontend_defaults

    max_jobs_per_frontend_defaults = cWParams.commentedOrderedDict()
    max_jobs_per_frontend_defaults["name"] = (None, "string",
                                              "frontend name", None)
    max_jobs_per_frontend_defaults["held"] = (
        '50', "nr",
        "Maximum number of held glideins (for this frontend) before forcing the cleanup.",
        None)
    max_jobs_per_frontend_defaults["idle"] = (
        '100', "nr",
        "Maximum number of idle glideins (for this frontend) allowed.",
        None)
    max_jobs_per_frontend_defaults["glideins"] = (
        '5000', "nr",
        "Maximum number of concurrent glideins (per frontend) that can be submitted",
        None)
    entry_config_max_jobs_defaults["per_frontends"] = (
        xmlParse.OrderedDict(), 'Dictionary of frontends',
        "Each frontend entry contains", max_jobs_per_frontend_defaults)
    entry_config_defaults['max_jobs'] = entry_config_max_jobs_defaults

    entry_config_restrictions_defaults = cWParams.commentedOrderedDict()
    entry_config_restrictions_defaults["require_voms_proxy"] = (
        "False", "Bool",
        "Whether this entry point requires a voms proxy", None)
    entry_config_restrictions_defaults["require_glidein_glexec_use"] = (
        "False", "Bool",
        "Whether this entry requires glidein to use glexec", None)
    entry_config_defaults[
        'restrictions'] = entry_config_restrictions_defaults

    # Base settings shared by the submit/remove/release schedd interactions
    entry_config_queue_defaults = cWParams.commentedOrderedDict()
    entry_config_queue_defaults["max_per_cycle"] = [
        '100', "nr", "Maximum number of jobs affected per cycle.", None
    ]
    entry_config_queue_defaults["sleep"] = [
        '0.2', "seconds", "Sleep between interactions with the schedd.",
        None
    ]

    entry_config_defaults['submit'] = copy.deepcopy(
        entry_config_queue_defaults)
    entry_config_defaults['submit']['cluster_size'] = [
        '10', "nr", "Max number of jobs submitted in a single transaction.",
        None
    ]
    entry_config_defaults['submit']['slots_layout'] = [
        'partitionable', "string", "The way multiple slots should be setup.",
        None
    ]
    self.submit_attrs = cWParams.commentedOrderedDict()
    self.submit_attrs["value"] = ("All", "string",
                                  "HTCondor classad value", None)
    entry_config_defaults['submit']['submit_attrs'] = (
        xmlParse.OrderedDict(), 'Dictionary of submit attributes',
        "Each attribute contains", self.submit_attrs)
    entry_config_defaults['remove'] = copy.deepcopy(
        entry_config_queue_defaults)
    entry_config_defaults['remove']['max_per_cycle'][0] = '5'
    entry_config_defaults['release'] = copy.deepcopy(
        entry_config_queue_defaults)
    entry_config_defaults['release']['max_per_cycle'][0] = '20'

    # not exported and order does not matter, can stay a regular dictionary
    sub_defaults = {
        'attrs': (xmlParse.OrderedDict(), 'Dictionary of attributes',
                  "Each attribute entry contains", self.attr_defaults),
        'files': ([], 'List of files', "Each file entry contains",
                  self.file_defaults),
        'infosys_refs': ([], 'List of information system references',
                         "Each reference points to this entry",
                         self.infosys_defaults),
        'monitorgroups': ([], 'List of monitoring groups',
                          "Each group entry belongs to",
                          self.mongroup_defaults)
    }

    self.entry_defaults = cWParams.commentedOrderedDict()
    self.entry_defaults["gatekeeper"] = (None, 'gatekeeper',
                                         'Grid gatekeeper/resource', None)
    self.entry_defaults["gridtype"] = ('gt2', 'grid_type',
                                       'Condor Grid type', None)
    self.entry_defaults["trust_domain"] = ('OSG', 'trust_domain',
                                           'Entry trust domain', None)
    self.entry_defaults["auth_method"] = (
        'grid_proxy', 'auth_method',
        'Type of auth method this entry supports', None)
    self.entry_defaults["vm_id"] = (None, 'vm_id',
                                    'VM id this entry supports', None)
    self.entry_defaults["vm_type"] = (None, 'vm_type',
                                      'VM type this entry supports', None)
    self.entry_defaults["rsl"] = (None, 'RSL', 'Globus gt2 RSL option',
                                  None)
    self.entry_defaults["bosco_dir"] = (
        None, 'bosco_dir',
        'BOSCO directory on the (remote) resource submit host', None)
    self.entry_defaults['schedd_name'] = (
        None, "ScheddName",
        "Which schedd to use (Overrides the global one if specified)", None)
    self.entry_defaults["work_dir"] = (".", ".|Condor|OSG|TMPDIR",
                                       "Where to start glidein", None)
    self.entry_defaults['proxy_url'] = (None, 'proxy_url',
                                        "Squid cache to use", None)
    self.entry_defaults['verbosity'] = (
        'std', 'std|nodebug|fast',
        "Verbosity level and timeout setting", None)
    self.entry_defaults["enabled"] = ("True", "Bool",
                                      "Is this entry enabled?", None)
    self.entry_defaults["config"] = entry_config_defaults
    self.entry_defaults["attrs"] = sub_defaults['attrs']
    self.entry_defaults["files"] = sub_defaults['files']
    self.entry_defaults["infosys_refs"] = sub_defaults['infosys_refs']
    self.entry_defaults["monitorgroups"] = copy.deepcopy(
        sub_defaults['monitorgroups'])
    self.entry_defaults["allow_frontends"] = (
        xmlParse.OrderedDict(), 'Dictionary of frontends',
        "Each frontend entry contains", self.allow_defaults)

    ###############################
    # Start defining the defaults
    self.defaults["factory_name"] = (socket.gethostname(), 'ID',
                                     'Factory name', None)
    self.defaults["glidein_name"] = (None, 'ID', 'Glidein name', None)
    self.defaults['schedd_name'] = (
        "schedd_glideins@%s" % socket.gethostname(), "ScheddName",
        "Which schedd to use, can be a comma separated list", None)
    self.defaults['factory_collector'] = (
        None, "CollectorName",
        "Which collector should we use for factory ClassAds", None)
    self.defaults['factory_versioning'] = (
        'True', 'Bool', 'Should we create versioned subdirectories?', None)

    submit_defaults = cWParams.commentedOrderedDict()
    submit_defaults["base_dir"] = ("%s/glideinsubmit" % os.environ["HOME"],
                                   "base_dir", "Submit base dir", None)
    submit_defaults["base_log_dir"] = ("%s/glideinlog" % os.environ["HOME"],
                                       "log_dir", "Submit base log dir",
                                       None)
    submit_defaults["base_client_log_dir"] = (
        "%s/glideclientlog" % os.environ["HOME"], "client_dir",
        "Base dir for client logs, needs a user_<uid> subdir per frontend user",
        None)
    submit_defaults["base_client_proxies_dir"] = (
        "%s/glideclientproxies" % os.environ["HOME"], "client_dir",
        "Base dir for client proxies, needs a user_<uid> subdir per frontend user",
        None)
    self.defaults["submit"] = submit_defaults

    one_log_retention_defaults = cWParams.commentedOrderedDict()
    one_log_retention_defaults["min_days"] = [
        "3.0", "days",
        "Min number of days the logs must be preserved (even if they use too much space)",
        None
    ]
    one_log_retention_defaults["max_days"] = [
        "7.0", "days", "Max number of days the logs should be preserved",
        None
    ]
    one_log_retention_defaults["max_mbytes"] = [
        "100.0", "Mbytes", "Max number of Mbytes the logs can use", None
    ]

    monitor_footer_defaults = cWParams.commentedOrderedDict()
    monitor_footer_defaults["display_txt"] = [
        "", "string",
        "what will be displayed at the bottom of the monitoring page", None
    ]
    monitor_footer_defaults["href_link"] = [
        "", "string", "where to link to", None
    ]
    self.defaults["monitor_footer"] = monitor_footer_defaults

    process_log_defaults = copy.deepcopy(one_log_retention_defaults)
    process_log_defaults['extension'] = [
        "all", "string", "name of the log extention", None
    ]
    process_log_defaults['msg_types'] = [
        "INFO, WARN, ERR", "string", "types of log messages", None
    ]
    process_log_defaults['backup_count'] = [
        "5", "string", "Number of backup logs to keep", None
    ]
    process_log_defaults['compression'] = [
        "", "string", "Compression for backup log files", None
    ]

    log_retention_defaults = cWParams.commentedOrderedDict()
    log_retention_defaults["process_logs"] = (
        [], 'Dictionary of log types', "Each log corresponds to a log file",
        copy.deepcopy(process_log_defaults))
    log_retention_defaults["job_logs"] = copy.deepcopy(
        one_log_retention_defaults)
    log_retention_defaults["job_logs"]["min_days"][0] = "2.0"

    self.defaults['advertise_with_tcp'] = (
        'True', 'Bool', 'Should condor_advertise use TCP connections?', None)
    self.defaults['advertise_with_multiple'] = (
        'True', 'Bool', 'Should condor_advertise use -multiple?', None)

    log_retention_defaults["summary_logs"] = copy.deepcopy(
        one_log_retention_defaults)
    log_retention_defaults["summary_logs"]["max_days"][0] = "31.0"
    log_retention_defaults["condor_logs"] = copy.deepcopy(
        one_log_retention_defaults)
    log_retention_defaults["condor_logs"]["max_days"][0] = "14.0"
    self.defaults["log_retention"] = log_retention_defaults

    self.defaults['loop_delay'] = ('60', 'seconds',
                                   'Number of seconds between iterations',
                                   None)
    self.defaults['advertise_delay'] = ('5', 'NR',
                                        'Advertize evert NR loops', None)
    self.defaults['restart_attempts'] = (
        '3', 'NR',
        'Max allowed NR restarts every restart_interval before shutting down',
        None)
    self.defaults['restart_interval'] = (
        '1800', 'NR',
        'Time interval NR sec which allow max restart attempts', None)
    self.defaults['entry_parallel_workers'] = (
        '0', 'NR',
        'Number of entries that will perform the work in parallel', None)

    stage_defaults = cWParams.commentedOrderedDict()
    stage_defaults["base_dir"] = ("/var/www/html/glidefactory/stage",
                                  "base_dir", "Stage base dir", None)
    stage_defaults["web_base_url"] = ("http://%s/glidefactory/stage" %
                                      socket.gethostname(), 'base_url',
                                      'Base Web server URL', None)
    stage_defaults["use_symlink"] = (
        "True", "Bool", "Can I symlink stage dir from submit dir?", None)
    self.defaults["stage"] = stage_defaults

    self.monitor_defaults["base_dir"] = (
        "/var/www/html/glidefactory/monitor", "base_dir",
        "Monitoring base dir", None)
    # Default for rrd update threads
    self.monitor_defaults["update_thread_count"] = (
        os.sysconf('SC_NPROCESSORS_ONLN'), "update_thread_count",
        "Number of rrd update threads. Defaults to cpu count.", None)
    self.defaults["monitor"] = self.monitor_defaults

    self.frontend_sec_class_defaults = cWParams.commentedOrderedDict()
    self.frontend_sec_class_defaults["username"] = (
        None, 'username', 'UNIX ID to be used for this security class',
        None)

    self.frontend_defaults = cWParams.commentedOrderedDict()
    self.frontend_defaults["identity"] = (None, 'identity',
                                          'Authenticated Identity', None)
    self.frontend_defaults["security_classes"] = (
        xmlParse.OrderedDict(), "Dictionary of security class maps",
        "Each mapping contains", self.frontend_sec_class_defaults)

    monitoring_collector_defaults = cWParams.commentedOrderedDict()
    monitoring_collector_defaults["node"] = (
        None, "nodename",
        "Factory monitoring collector node name (for example, col1.my.org:9999)",
        None)
    monitoring_collector_defaults["DN"] = (
        None, "dn",
        "Factory collector distinguised name (subject) (for example, /DC=org/DC=myca/OU=Services/CN=col1.my.org)",
        None)
    monitoring_collector_defaults["secondary"] = (
        "False", "Bool",
        "Secondary nodes will be used by glideins, if present", None)
    monitoring_collector_defaults["group"] = (
        "default", "string",
        "Collector group name useful to group HA setup", None)
    self.defaults["monitoring_collectors"] = (
        [], 'List of factory monitoring collectors',
        "Each collector contains", monitoring_collector_defaults)

    security_default = cWParams.commentedOrderedDict()
    security_default["pub_key"] = (
        "RSA", "None|RSA",
        "Type of public key system used for secure message passing", None)
    security_default["reuse_oldkey_onstartup_gracetime"] = (
        "900", "seconds",
        "Time in sec old key can be used to decrypt requests from frontend",
        None)
    security_default["remove_old_cred_freq"] = (
        "24", "hours",
        "Frequency in hrs for cleaning unused credentials", None)
    security_default["remove_old_cred_age"] = (
        "30", "days", "Credentials older than this should be removed", None)
    security_default["key_length"] = ("2048", "bits",
                                      "Key length in bits", None)
    security_default["frontends"] = (xmlParse.OrderedDict(),
                                     "Dictionary of frontend",
                                     "Each frontend contains",
                                     self.frontend_defaults)
    self.defaults["security"] = security_default

    condor_defaults = cWParams.commentedOrderedDict()
    condor_defaults["os"] = ("default", "osname",
                             "Operating System (like linux-rhel3)", None)
    condor_defaults["arch"] = ("default", "arch",
                               "Architecture (like x86)", None)
    # NOTE(review): type/description below look like a copy-paste of "arch"
    # ("Architecture (like x86)" for a version field) — confirm upstream.
    condor_defaults["version"] = ("default", "arch",
                                  "Architecture (like x86)", None)
    condor_defaults["tar_file"] = (
        None, "fname",
        "Tarball containing condor binaries (overrides base_dir if defined)",
        None)
    condor_defaults["base_dir"] = (
        None, "base_dir",
        "Condor distribution base dir (used only if tar_file undefined)",
        None)
    self.defaults["condor_tarballs"] = ([], 'List of condor tarballs',
                                        "Each entry contains",
                                        condor_defaults)

    self.defaults["attrs"] = sub_defaults['attrs']
    self.defaults["files"] = copy.deepcopy(sub_defaults['files'])
    # ordering is specific to global section of factory
    self.defaults["files"][3]["after_entry"] = (
        "False", 'Bool',
        'Should this file be loaded after the entry ones?', None)
    self.defaults["entries"] = (xmlParse.OrderedDict(),
                                "Dictionary of entries",
                                "Each entry contains", self.entry_defaults)
    return