def create_runtimeenv(filename, client_id, configuration):
    """Create a new runtime environment from the definition in filename.

    Parses the uploaded description file, type-checks the result against the
    runtime environment keyword specification and removes the temporary file.

    NOTE(review): this excerpt appears truncated - status/parsemsg are
    computed but the code acting on them is not visible here, and client_id
    is unused in the visible part.
    """
    # Parse the raw runtime environment description file
    result = parser.parse(filename)
    # Keyword specification used for the type check below
    external_dict = rekeywords.get_keywords_dict()
    (status, parsemsg) = parser.check_types(result, external_dict,
                                            configuration)
    # Best-effort cleanup of the temporary upload: a failure is captured in
    # msg rather than propagated (Python 2 except syntax preserved)
    try:
        os.remove(filename)
    except Exception, err:
        msg = \
            'Exception removing temporary runtime environment file %s, %s'\
            % (filename, err)
def parse_and_save_pickle(source, destination, keywords, client_id,
                          configuration, strip_space, strip_comments):
    """Use conf parser to parse settings in mRSL file and save resulting
    dictionary in a pickled file in user_settings.

    NOTE(review): this excerpt appears truncated - destination and client_dir
    are prepared but the actual pickling/saving step is not visible here.
    """
    # Map the client distinguished name to the user home directory name
    client_dir = client_id_dir(client_id)
    result = parser.parse(source, strip_space, strip_comments)
    # Validate parsed values against the caller-supplied keyword spec
    (status, parsemsg) = parser.check_types(result, keywords, configuration)
    # Best-effort removal of the temporary source file: a failure is captured
    # in msg rather than propagated
    try:
        os.remove(source)
    except Exception, err:
        msg = 'Exception removing temporary file %s, %s'\
            % (source, err)
def get_resource_config_dict(config_file, configuration=None):
    """Find and return configuration dictionary in provided conf file.

    Parses config_file, type-checks the result against the resource
    configuration keyword specification and flattens it to a plain
    key -> value dictionary.

    Returns a 3-tuple: (status, message, dictionary). On typecheck failure
    the raw keyword specification dict is returned as third element.

    BUG FIX: the original referenced a `configuration` name that was never
    defined in this scope, so every call raised NameError. It is now an
    optional keyword argument (backward compatible for existing callers
    passing only config_file) with a fallback to the global configuration
    object, mirroring the sibling implementation of this function.
    """
    if not configuration:
        # NOTE(review): assumes get_configuration_object is importable in
        # this module as it is where the sibling version lives - confirm
        configuration = get_configuration_object()
    result = parser.parse(config_file)
    external_dict = resconfkeywords.get_keywords_dict(configuration)

    # The Configfile has the right structure
    # Check if the types are correct too

    (status, msg) = parser.check_types(result, external_dict, configuration)
    if not status:
        return (False, 'Parse failed (typecheck) ' + msg, external_dict)

    # Flatten keyword spec entries to plain key -> value pairs
    global_dict = {}
    for (key, value_dict) in external_dict.iteritems():
        global_dict[key] = value_dict['Value']
    return (status, msg, global_dict)
def get_resource_config_dict(configuration, config_file):
    """Find and return configuration dictionary in provided conf file.

    Parses config_file, type-checks it against the resource configuration
    keyword specification and flattens the spec to a key -> value dict.
    Returns (status, message, dictionary); on typecheck failure the raw
    keyword specification dict is handed back as the third element.
    """
    if not configuration:
        configuration = get_configuration_object()

    parsed = parser.parse(config_file)
    keyword_spec = resconfkeywords.get_keywords_dict(configuration)

    # The Configfile has the right structure - now verify the types too
    (status, msg) = parser.check_types(parsed, keyword_spec, configuration)
    if not status:
        return (False, 'Parse failed (typecheck) ' + msg, keyword_spec)

    # Collapse the keyword specification into plain key -> value pairs
    flattened = dict([(key, spec['Value']) for (key, spec) in
                      keyword_spec.iteritems()])
    return (status, msg, flattened)
def parse(
    localfile_spaces,
    job_id,
    client_id,
    forceddestination,
    outfile='AUTOMATIC',
    ):
    """Parse job description and optionally write results to parsed mRSL file.
    If outfile is non-empty it is used as destination file, and the keyword
    AUTOMATIC is replaced by the default mrsl dir destination.

    NOTE(review): this excerpt ends right after path normalization - the
    code that writes the result to outfile is not visible here.
    """
    configuration = get_configuration_object()
    logger = configuration.logger
    client_dir = client_id_dir(client_id)

    # return a tuple (bool status, str msg). This is done because cgi-scripts
    # are not allowed to print anything before 'the first two special lines'
    # are printed

    result = parser.parse(localfile_spaces)
    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # The mRSL has the right structure check if the types are correct too
    # and inline update the default external_dict entries with the ones
    # from the actual job specification

    (status, msg) = parser.check_types(result, external_dict, configuration)
    if not status:
        return (False, 'Parse failed (typecheck) %s' % msg)
    logger.debug('check_types updated job dict to: %s' % external_dict)
    global_dict = {}

    # Insert the parts from mrslkeywords we need in the rest of the MiG system

    for (key, value_dict) in external_dict.iteritems():
        global_dict[key] = value_dict['Value']

    # We do not expand any job variables yet in order to allow any future
    # resubmits to properly expand job ID.

    vgrid_list = global_dict['VGRID']
    vgrid_access = user_vgrid_access(configuration, client_id)

    # Replace any_vgrid keyword with all allowed vgrids (on time of submit!)

    try:
        any_pos = vgrid_list.index(any_vgrid)
        # Splice the user's accessible vgrids in at the keyword position
        vgrid_list[any_pos:any_pos] = vgrid_access

        # Remove any additional any_vgrid keywords

        while any_vgrid in vgrid_list:
            vgrid_list.remove(any_vgrid)
    except ValueError:
        # No any_vgrid keywords in list - move along
        pass

    # Now validate supplied vgrids

    for vgrid_name in vgrid_list:
        if not vgrid_name in vgrid_access:
            return (False, """Failure: You must be an owner or member of the '%s' vgrid to submit a job to it!""" % vgrid_name)

    # Fall back to default vgrid if no vgrid was supplied

    if not vgrid_list:

        # Please note that vgrid_list is a ref to global_dict list
        # so we must modify and not replace with a new list!

        vgrid_list.append(default_vgrid)

    # convert specified runtime environments to upper-case and verify they
    # actually exist
    # do not check runtime envs if the job is for ARC (submission will
    # fail later)

    if global_dict.get('JOBTYPE', 'unset') != 'arc' \
            and global_dict.has_key('RUNTIMEENVIRONMENT'):
        re_entries_uppercase = []
        for specified_re in global_dict['RUNTIMEENVIRONMENT']:
            specified_re = specified_re.upper()
            re_entries_uppercase.append(specified_re)
            if not is_runtime_environment(specified_re, configuration):
                return (False, """You have specified a non-nexisting runtime environment '%s', therefore the job can not be run on any resources.""" % \
                        specified_re)
        # Jobs requesting MOUNT implicitly need the default mount runtime
        # env - only append it if not already requested explicitly
        if global_dict.get('MOUNT', []) != []:
            if configuration.res_default_mount_re.upper()\
                    not in re_entries_uppercase:
                re_entries_uppercase.append(
                    configuration.res_default_mount_re.upper())
        global_dict['RUNTIMEENVIRONMENT'] = re_entries_uppercase

    if global_dict.get('JOBTYPE', 'unset').lower() == 'interactive':

        # if jobtype is interactive append command to create the notification
        # file .interactivejobfinished that breaks the infinite loop waiting
        # for the interactive job to finish and send output files to the MiG
        # server

        global_dict['EXECUTE'].append('touch .interactivejobfinished')

    # put job id and name of user in the dictionary

    global_dict['JOB_ID'] = job_id
    global_dict['USER_CERT'] = client_id

    # mark job as received

    global_dict['RECEIVED_TIMESTAMP'] = time.gmtime()
    global_dict['STATUS'] = 'PARSE'

    if forceddestination:
        global_dict['FORCEDDESTINATION'] = forceddestination
        if forceddestination.has_key('UNIQUE_RESOURCE_NAME'):
            global_dict["RESOURCE"] = "%(UNIQUE_RESOURCE_NAME)s_*" % \
                forceddestination
        if forceddestination.has_key('RE_NAME'):
            re_name = forceddestination['RE_NAME']

            # verify the verifyfiles entries are not modified (otherwise RE
            # creator can specify multiple ::VERIFYFILES:: keywords and give
            # the entries other names (perhaps overwriting files in the home
            # directories of resource owners executing the testprocedure)

            for verifyfile in global_dict['VERIFYFILES']:
                verifytypes = ['.status', '.stderr', '.stdout']
                found = False
                for verifytype in verifytypes:
                    if verifyfile == 'verify_runtime_env_%s%s' % (re_name,
                            verifytype):
                        found = True
                if not found:
                    return (False, '''You are not allowed to specify the ::VERIFY:: keyword in a testprocedure, it is done automatically''')

    # normalize any path fields to be taken relative to home

    for field in ('INPUTFILES', 'OUTPUTFILES', 'EXECUTABLES', 'VERIFYFILES'):
        if not global_dict.has_key(field):
            continue
        normalized_field = []
        for line in global_dict[field]:
            normalized_parts = []
            # presumably splits entry into source/destination pair on the
            # configured separator - confirm src_dst_sep definition
            line_parts = line.split(src_dst_sep)
            if len(line_parts) < 1 or len(line_parts) > 2:
                return (False,
                        '%s entries must contain 1 or 2 space-separated items'\
                        % field)
            for part in line_parts:

                # deny leading slashes i.e. force absolute to relative paths

                part = part.lstrip('/')
                if part.find('://') != -1:

                    # keep external targets as is - normpath breaks '://'

                    normalized_parts.append(part)
                    check_path = part.split('/')[-1]
                else:

                    # normalize path to avoid e.g. './' which breaks dir
                    # handling on resource

                    check_path = os.path.normpath(part)
                    normalized_parts.append(check_path)
                try:
                    # valid_path raises on unacceptable paths - exact
                    # contract defined elsewhere
                    valid_path(check_path)
                except Exception, exc:
                    return (False, 'Invalid %s part in %s: %s' % \
                            (field, html_escape(part), exc))
            normalized_field.append(' '.join(normalized_parts))
        global_dict[field] = normalized_field
def parse(
    localfile_spaces,
    job_id,
    client_id,
    forceddestination,
    outfile='AUTOMATIC',
    ):
    """Parse job description and optionally write results to parsed mRSL file.
    If outfile is non-empty it is used as destination file, and the keyword
    AUTOMATIC is replaced by the default mrsl dir destination.

    NOTE(review): this excerpt ends right after path normalization - the
    code that writes the result to outfile is not visible here. It also
    looks like an older variant of a sibling parse implementation
    (user_allowed_vgrids instead of user_vgrid_access, whitespace split of
    file entries, unconditional append of the default mount runtime env).
    """
    configuration = get_configuration_object()
    logger = configuration.logger
    client_dir = client_id_dir(client_id)

    # return a tuple (bool status, str msg). This is done because cgi-scripts
    # are not allowed to print anything before 'the first two special lines'
    # are printed

    result = parser.parse(localfile_spaces)
    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # The mRSL has the right structure check if the types are correct too
    # and inline update the default external_dict entries with the ones
    # from the actual job specification

    (status, msg) = parser.check_types(result, external_dict, configuration)
    if not status:
        return (False, 'Parse failed (typecheck) %s' % msg)
    logger.debug('check_types updated job dict to: %s' % external_dict)
    global_dict = {}

    # Insert the parts from mrslkeywords we need in the rest of the MiG system

    for (key, value_dict) in external_dict.iteritems():
        global_dict[key] = value_dict['Value']

    # We do not expand any job variables yet in order to allow any future
    # resubmits to properly expand job ID.

    vgrid_list = global_dict['VGRID']
    allowed_vgrids = user_allowed_vgrids(configuration, client_id)

    # Replace any_vgrid keyword with all allowed vgrids (on time of submit!)

    try:
        any_pos = vgrid_list.index(any_vgrid)
        # Splice the user's allowed vgrids in at the keyword position
        vgrid_list[any_pos:any_pos] = allowed_vgrids

        # Remove any additional any_vgrid keywords

        while any_vgrid in vgrid_list:
            vgrid_list.remove(any_vgrid)
    except ValueError:
        # No any_vgrid keywords in list - move along
        pass

    # Now validate supplied vgrids

    for vgrid_name in vgrid_list:
        if not vgrid_name in allowed_vgrids:
            return (False, """Failure: You must be an owner or member of the '%s' vgrid to submit a job to it!""" % vgrid_name)

    # Fall back to default vgrid if no vgrid was supplied

    if not vgrid_list:

        # Please note that vgrid_list is a ref to global_dict list
        # so we must modify and not replace with a new list!

        vgrid_list.append(default_vgrid)

    # convert specified runtime environments to upper-case and verify they
    # actually exist
    # do not check runtime envs if the job is for ARC (submission will
    # fail later)

    if global_dict.get('JOBTYPE', 'unset') != 'arc' \
            and global_dict.has_key('RUNTIMEENVIRONMENT'):
        re_entries_uppercase = []
        for specified_re in global_dict['RUNTIMEENVIRONMENT']:
            specified_re = specified_re.upper()
            re_entries_uppercase.append(specified_re)
            if not is_runtime_environment(specified_re, configuration):
                return (False, """You have specified a non-nexisting runtime environment '%s', therefore the job can not be run on any resources.""" % \
                        specified_re)
        # Jobs requesting MOUNT implicitly need the default mount runtime
        # env. NOTE(review): appended without a membership check here, so a
        # duplicate entry is possible if the job already requested it
        if global_dict.get('MOUNT', []) != []:
            re_entries_uppercase.append(configuration.res_default_mount_re.upper())
        global_dict['RUNTIMEENVIRONMENT'] = re_entries_uppercase

    if global_dict.get('JOBTYPE', 'unset').lower() == 'interactive':

        # if jobtype is interactive append command to create the notification
        # file .interactivejobfinished that breaks the infinite loop waiting
        # for the interactive job to finish and send output files to the MiG
        # server

        global_dict['EXECUTE'].append('touch .interactivejobfinished')

    # put job id and name of user in the dictionary

    global_dict['JOB_ID'] = job_id
    global_dict['USER_CERT'] = client_id

    # mark job as received

    global_dict['RECEIVED_TIMESTAMP'] = time.gmtime()
    global_dict['STATUS'] = 'PARSE'

    if forceddestination:
        global_dict['FORCEDDESTINATION'] = forceddestination
        if forceddestination.has_key('UNIQUE_RESOURCE_NAME'):
            global_dict["RESOURCE"] = "%(UNIQUE_RESOURCE_NAME)s_*" % \
                forceddestination
        if forceddestination.has_key('RE_NAME'):
            re_name = forceddestination['RE_NAME']

            # verify the verifyfiles entries are not modified (otherwise RE
            # creator can specify multiple ::VERIFYFILES:: keywords and give
            # the entries other names (perhaps overwriting files in the home
            # directories of resource owners executing the testprocedure)

            for verifyfile in global_dict['VERIFYFILES']:
                verifytypes = ['.status', '.stderr', '.stdout']
                found = False
                for verifytype in verifytypes:
                    if verifyfile == 'verify_runtime_env_%s%s' % (re_name,
                            verifytype):
                        found = True
                if not found:
                    return (False, '''You are not allowed to specify the ::VERIFY:: keyword in a testprocedure, it is done automatically''')

    # normalize any path fields to be taken relative to home

    for field in ('INPUTFILES', 'OUTPUTFILES', 'EXECUTABLES', 'VERIFYFILES'):
        if not global_dict.has_key(field):
            continue
        normalized_field = []
        for line in global_dict[field]:
            normalized_parts = []
            # split entry on any whitespace into 1 or 2 items
            line_parts = line.split()
            if len(line_parts) < 1 or len(line_parts) > 2:
                return (False,
                        '%s entries must contain 1 or 2 space-separated items'\
                        % field)
            for part in line_parts:

                # deny leading slashes i.e. force absolute to relative paths

                part = part.lstrip('/')
                if part.find('://') != -1:

                    # keep external targets as is - normpath breaks '://'

                    normalized_parts.append(part)
                    check_path = part.split('/')[-1]
                else:

                    # normalize path to avoid e.g. './' which breaks dir
                    # handling on resource

                    check_path = os.path.normpath(part)
                    normalized_parts.append(check_path)
                try:
                    # valid_path raises on unacceptable paths - exact
                    # contract defined elsewhere
                    valid_path(check_path)
                except Exception, exc:
                    return (False, 'Invalid %s part in %s: %s' % \
                            (field, html_escape(part), exc))
            normalized_field.append(' '.join(normalized_parts))
        global_dict[field] = normalized_field