def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append(
            {'object_type': 'error_text', 'text': '''Only accepting
CSRF-filtered POST requests to prevent unintended updates'''
             })
        return (output_objects, returnvalues.CLIENT_ERROR)

    if not configuration.site_enable_jobs:
        output_objects.append(
            {'object_type': 'error_text', 'text':
             '''Job execution is not enabled on this system'''})
        return (output_objects, returnvalues.SYSTEM_ERROR)

    external_dict = get_keywords_dict(configuration)
    mrsl = fields_to_mrsl(configuration, user_arguments_dict, external_dict)

    tmpfile = None

    # save to temporary file

    try:
        (filehandle, real_path) = tempfile.mkstemp(text=True)
        relative_path = os.path.basename(real_path)
        os.write(filehandle, mrsl)
        os.close(filehandle)
    except Exception, err:
        output_objects.append(
            {'object_type': 'error_text',
             'text': 'Failed to write temporary mRSL file: %s' % err})
        return (output_objects, returnvalues.SYSTEM_ERROR)
def has_default_value(configuration, mrsl_attribute, value):
    """Returns True/False depending on whether the mRSL value matches the
    keyword's default value."""

    default_value = None
    keywords_dict = get_keywords_dict(configuration)
    if keywords_dict.has_key(mrsl_attribute):
        default_value = keywords_dict[mrsl_attribute].get('Value')
    return default_value == value
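# A minimal usage sketch, not part of the original module: filter out the
# fields a user never changed so only meaningful values are written out.
# 'configuration' is assumed to be a loaded MiG configuration object and
# 'job_fields' a hypothetical dict of mRSL keyword -> value pairs.
def changed_fields(configuration, job_fields):
    """Return only the fields that differ from the mRSL defaults"""
    changed = {}
    for (key, val) in job_fields.items():
        if not has_default_value(configuration, key, val):
            changed[key] = val
    return changed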
def signature():
    defaults = {'job_id': REJECT_UNSET}
    configuration = get_configuration_object()
    external_dict = mrslkeywords.get_keywords_dict(configuration)
    for (key, value_dict) in external_dict.iteritems():
        if not defaults.has_key(key):
            # do not overwrite
            defaults[key] = []
    return ['html_form', defaults]
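# Illustrative sketch under the usual MiG convention that REJECT_UNSET
# marks a mandatory field: the defaults built by signature() drive
# validate_input_and_cert(), with 'job_id' required and every mRSL
# keyword optional (empty list). 'mandatory_fields' is a hypothetical
# helper, not from the original module.
def mandatory_fields():
    """List the fields a request must supply according to signature()"""
    defaults = signature()[1]
    return [key for (key, val) in defaults.items() if val == REJECT_UNSET]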
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text':
             'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    save_as_default = (accepted['save_as_default'][-1] != 'False')
    external_dict = get_keywords_dict(configuration)
    mrsl = fields_to_mrsl(configuration, user_arguments_dict, external_dict)

    tmpfile = None

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                                            client_dir)) + os.sep

    # save to temporary file

    try:
        (filehandle, real_path) = tempfile.mkstemp(text=True)
        relative_path = os.path.basename(real_path)
        os.write(filehandle, mrsl)
        os.close(filehandle)
    except Exception, err:
        output_objects.append(
            {'object_type': 'error_text',
             'text': 'Failed to write temporary mRSL file: %s' % err})
        return (output_objects, returnvalues.SYSTEM_ERROR)
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False,
                                  op_title=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    job_id_list = accepted['job_id']
    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    status = returnvalues.OK
    for job_id in job_id_list:

        # job = Job()

        filepath = os.path.join(base_dir, job_id)
        filepath += '.mRSL'
        (new_job_obj_status, new_job_obj) = \
            create_job_object_from_pickled_mrsl(filepath, logger,
                                                external_dict)
        if not new_job_obj_status:
            output_objects.append({'object_type': 'error_text',
                                   'text': new_job_obj})
            status = returnvalues.CLIENT_ERROR
        else:

            # return new_job_obj

            output_objects.append({'object_type': 'jobobj',
                                   'jobobj': new_job_obj})
    return (output_objects, status)
def job_api_create(configuration, workflow_session, job_type=JOB,
                   **job_attributes):
    """
    Handler for 'create' calls to job API.
    :param configuration: The MiG configuration object.
    :param workflow_session: The MiG job session. This must contain the
    key 'owner'
    :param job_type: [optional] A MiG job type. Default is 'job'.
    :param job_attributes: dictionary of arguments used to create the job
    :return: Tuple (boolean, string)
    If a job can be created then a tuple is returned of first value True,
    and the created job's id in the second value. If it cannot be created
    then a tuple is returned with a first value of False, and an
    explanatory error message as the second value.
    """
    _logger = configuration.logger

    client_id = workflow_session['owner']
    external_dict = get_keywords_dict(configuration)

    if 'vgrid' not in job_attributes:
        msg = "Cannot create new job without specifying a %s (vgrid) " \
              "for it to be attached to. " % configuration.site_vgrid_label
        return (False, msg)

    vgrid = job_attributes['vgrid']

    # User is vgrid owner or member
    success, msg, _ = init_vgrid_script_list(vgrid, client_id,
                                             configuration)
    if not success:
        return (False, msg)

    job_attributes.pop('vgrid')
    mrsl = fields_to_mrsl(configuration, job_attributes, external_dict)

    tmpfile = None

    # save to temporary file
    try:
        (filehandle, real_path) = tempfile.mkstemp(text=True)
        os.write(filehandle, mrsl)
        os.close(filehandle)
    except Exception, err:
        msg = 'Failed to write temporary mRSL file: %s' % err
        _logger.error(msg)
        return (False, msg)
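# A hedged usage sketch, not from the original module: submit a
# one-command job to a vgrid through the job API. Passing 'EXECUTE' as a
# list of command lines mirrors the mRSL keyword style, but the exact
# attribute format fields_to_mrsl() expects is an assumption here, as is
# the 'submit_hello_job' helper name.
def submit_hello_job(configuration, workflow_session, vgrid_name):
    """Hypothetical wrapper around job_api_create for a trivial job"""
    return job_api_create(configuration, workflow_session,
                          vgrid=vgrid_name, EXECUTE=['echo hello'])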
def mrsl_keywords(configuration, output_objects):
    """All job description keywords"""

    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    output_objects.append({'object_type': 'header',
                           'text': 'Job description: mRSL'})
    sorted_keys = keywords_dict.keys()
    sorted_keys.sort()
    for keyword in sorted_keys:
        info = keywords_dict[keyword]
        output_objects.append({'object_type': 'html_form',
                               'text': "<div id='%s'></div>" % keyword})
        output_objects.append({'object_type': 'sectionheader',
                               'text': keyword})
        entries = []
        for (field, val) in info.items():
            entries.append(field + ': ' + str(val))
        output_objects.append({'object_type': 'list', 'list': entries})
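# Illustrative only: the lookups throughout this module suggest that each
# get_keywords_dict() entry maps a keyword to a dict with at least 'Type'
# (e.g. 'string', 'multiplestrings', 'multiplekeyvalues') and 'Value'
# (the default); any further fields are an assumption here. A tiny
# hypothetical helper for such lookups:
def keyword_type(configuration, keyword):
    """Look up a keyword's declared mRSL type, e.g. 'multiplestrings'"""
    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    return keywords_dict[keyword]['Type']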
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    patterns = accepted['job_id']

    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append(
            {'object_type': 'error_text', 'text': '''Only accepting
CSRF-filtered POST requests to prevent unintended updates'''
             })
        return (output_objects, returnvalues.CLIENT_ERROR)

    if not configuration.site_enable_jobs:
        output_objects.append(
            {'object_type': 'error_text', 'text':
             '''Job execution is not enabled on this system'''})
        return (output_objects, returnvalues.SYSTEM_ERROR)

    if not patterns:
        output_objects.append({'object_type': 'error_text',
                               'text': 'No job_id specified!'})
        return (output_objects, returnvalues.NO_SUCH_JOB_ID)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    filelist = []
    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    for pattern in patterns:
        pattern = pattern.strip()

        # Backward compatibility - all_jobs keyword should match all jobs

        if pattern == all_jobs:
            pattern = '*'

        # Check directory traversal attempts before actual handling to
        # avoid leaking information about file system layout while
        # allowing consistent error messages

        unfiltered_match = glob.glob(base_dir + pattern + '.mRSL')
        match = []
        for server_path in unfiltered_match:
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_path = os.path.abspath(server_path)
            if not valid_user_path(configuration, abs_path, base_dir,
                                   True):

                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.

                logger.warning('%s tried to %s restricted path %s ! (%s)'
                               % (client_id, op_name, abs_path, pattern))
                continue

            # Insert valid job files in filelist for later treatment

            match.append(abs_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match

        if not match:
            output_objects.append(
                {'object_type': 'error_text',
                 'text': '%s: You do not have any matching job IDs!'
                 % pattern})
            status = returnvalues.CLIENT_ERROR
        else:
            filelist += match

    # resubmit is hard on the server

    if len(filelist) > 100:
        output_objects.append({'object_type': 'error_text',
                               'text': 'Too many matching jobs (%s)!'
                               % len(filelist)})
        return (output_objects, returnvalues.CLIENT_ERROR)

    resubmitobjs = []
    status = returnvalues.OK
    for filepath in filelist:
        mrsl_file = filepath.replace(base_dir, '')
        job_id = mrsl_file.replace('.mRSL', '')

        # ("Resubmitting job with job_id: %s" % job_id)

        resubmitobj = {'object_type': 'resubmitobj', 'job_id': job_id}

        mrsl_dict = unpickle(filepath, logger)
        if not mrsl_dict:
            resubmitobj['message'] = "No such job: %s (%s)" % (job_id,
                                                               mrsl_file)
            status = returnvalues.CLIENT_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        resubmit_items = keywords_dict.keys()

        # loop selected keywords and create mRSL string

        resubmit_job_string = ''
        for dict_elem in resubmit_items:
            value = ''

            # Extract job value with fallback to default to support
            # optional fields

            job_value = mrsl_dict.get(dict_elem,
                                      keywords_dict[dict_elem]['Value'])
            if keywords_dict[dict_elem]['Type'].startswith(
                    'multiplekeyvalues'):
                for (elem_key, elem_val) in job_value:
                    if elem_key:
                        value += '%s=%s\n' % (str(elem_key).strip(),
                                              str(elem_val).strip())
            elif keywords_dict[dict_elem]['Type'].startswith('multiple'):
                for elem in job_value:
                    if elem:
                        value += '%s\n' % str(elem).rstrip()
            else:
                if str(job_value):
                    value += '%s\n' % str(job_value).rstrip()

            # Only insert keywords with an associated value

            if value:
                if value.rstrip() != '':
                    resubmit_job_string += '''::%s::
%s

''' % (dict_elem, value.rstrip())

        # save tempfile

        (filehandle, tempfilename) = \
            tempfile.mkstemp(dir=configuration.mig_system_files,
                             text=True)
        os.write(filehandle, resubmit_job_string)
        os.close(filehandle)

        # submit job the usual way

        (new_job_status, msg, new_job_id) = new_job(tempfilename,
                                                    client_id,
                                                    configuration,
                                                    False, True)
        if not new_job_status:
            resubmitobj['status'] = False
            resubmitobj['message'] = msg
            status = returnvalues.SYSTEM_ERROR
            resubmitobjs.append(resubmitobj)
            continue

            # o.out("Resubmit failed: %s" % msg)
            # o.reply_and_exit(o.ERROR)

        resubmitobj['status'] = True
        resubmitobj['new_job_id'] = new_job_id
        resubmitobjs.append(resubmitobj)

        # o.out("Resubmit successful: %s" % msg)
        # o.out("%s" % msg)

    output_objects.append({'object_type': 'resubmitobjs',
                           'resubmitobjs': resubmitobjs})

    return (output_objects, status)
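# A minimal standalone sketch of the mRSL block format the resubmit loop
# above emits: one '::KEYWORD::' header per block, its value lines, then
# a blank line. The keyword and values below are examples only.
def format_mrsl_block(keyword, values):
    """Render one mRSL block the way the resubmit loop above does"""
    return '::%s::\n%s\n\n' % (keyword, '\n'.join(values))

# format_mrsl_block('EXECUTE', ['echo hello']) yields:
# ::EXECUTE::
# echo hello
# (followed by a blank line)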
def parse(
    localfile_spaces,
    job_id,
    client_id,
    forceddestination,
    outfile='AUTOMATIC',
):
    """Parse job description and optionally write results to parsed mRSL
    file. If outfile is non-empty it is used as destination file, and the
    keyword AUTOMATIC is replaced by the default mrsl dir destination.
    """

    configuration = get_configuration_object()
    logger = configuration.logger
    client_dir = client_id_dir(client_id)

    # return a tuple (bool status, str msg). This is done because
    # cgi-scripts are not allowed to print anything before 'the first two
    # special lines' are printed

    result = parser.parse(localfile_spaces)

    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # The mRSL has the right structure; check if the types are correct too
    # and inline update the default external_dict entries with the ones
    # from the actual job specification

    (status, msg) = parser.check_types(result, external_dict,
                                       configuration)
    if not status:
        return (False, 'Parse failed (typecheck) %s' % msg)

    logger.debug('check_types updated job dict to: %s' % external_dict)

    global_dict = {}

    # Insert the parts from mrslkeywords we need in the rest of the MiG
    # system

    for (key, value_dict) in external_dict.iteritems():
        global_dict[key] = value_dict['Value']

    # We do not expand any job variables yet in order to allow any future
    # resubmits to properly expand job ID.

    vgrid_list = global_dict['VGRID']
    allowed_vgrids = user_allowed_vgrids(configuration, client_id)

    # Replace any_vgrid keyword with all allowed vgrids (at submit time!)

    try:
        any_pos = vgrid_list.index(any_vgrid)
        vgrid_list[any_pos:any_pos] = allowed_vgrids

        # Remove any additional any_vgrid keywords

        while any_vgrid in vgrid_list:
            vgrid_list.remove(any_vgrid)
    except ValueError:

        # No any_vgrid keywords in list - move along

        pass

    # Now validate supplied vgrids

    for vgrid_name in vgrid_list:
        if not vgrid_name in allowed_vgrids:
            return (False, """Failure: You must be an owner or member of
the '%s' vgrid to submit a job to it!""" % vgrid_name)

    # Fall back to default vgrid if no vgrid was supplied

    if not vgrid_list:

        # Please note that vgrid_list is a ref to the global_dict list,
        # so we must modify it and not replace it with a new list!

        vgrid_list.append(default_vgrid)

    # convert specified runtime environments to upper-case and verify they
    # actually exist

    # do not check runtime envs if the job is for ARC (submission will
    # fail later)

    if global_dict.get('JOBTYPE', 'unset') != 'arc' \
            and global_dict.has_key('RUNTIMEENVIRONMENT'):
        re_entries_uppercase = []
        for specified_re in global_dict['RUNTIMEENVIRONMENT']:
            specified_re = specified_re.upper()
            re_entries_uppercase.append(specified_re)
            if not is_runtime_environment(specified_re, configuration):
                return (False, """You have specified a non-existing
runtime environment '%s', therefore the job cannot be run on any
resources.""" % specified_re)
        if global_dict.get('MOUNT', []) != []:
            re_entries_uppercase.append(
                configuration.res_default_mount_re.upper())
        global_dict['RUNTIMEENVIRONMENT'] = re_entries_uppercase

    if global_dict.get('JOBTYPE', 'unset').lower() == 'interactive':

        # if jobtype is interactive append command to create the
        # notification file .interactivejobfinished that breaks the
        # infinite loop waiting for the interactive job to finish and
        # send output files to the MiG server

        global_dict['EXECUTE'].append('touch .interactivejobfinished')

    # put job id and name of user in the dictionary

    global_dict['JOB_ID'] = job_id
    global_dict['USER_CERT'] = client_id

    # mark job as received

    global_dict['RECEIVED_TIMESTAMP'] = time.gmtime()
    global_dict['STATUS'] = 'PARSE'

    if forceddestination:
        global_dict['FORCEDDESTINATION'] = forceddestination
        if forceddestination.has_key('UNIQUE_RESOURCE_NAME'):
            global_dict["RESOURCE"] = "%(UNIQUE_RESOURCE_NAME)s_*" % \
                forceddestination
        if forceddestination.has_key('RE_NAME'):
            re_name = forceddestination['RE_NAME']

            # verify the verifyfiles entries are not modified (otherwise
            # the RE creator could specify multiple ::VERIFYFILES::
            # keywords and give the entries other names, perhaps
            # overwriting files in the home directories of resource
            # owners executing the testprocedure)

            for verifyfile in global_dict['VERIFYFILES']:
                verifytypes = ['.status', '.stderr', '.stdout']
                found = False
                for verifytype in verifytypes:
                    if verifyfile == 'verify_runtime_env_%s%s' % \
                            (re_name, verifytype):
                        found = True
                if not found:
                    return (False, '''You are not allowed to specify the
::VERIFY:: keyword in a testprocedure, it is done automatically''')

    # normalize any path fields to be taken relative to home

    for field in ('INPUTFILES', 'OUTPUTFILES', 'EXECUTABLES',
                  'VERIFYFILES'):
        if not global_dict.has_key(field):
            continue
        normalized_field = []
        for line in global_dict[field]:
            normalized_parts = []
            line_parts = line.split()
            if len(line_parts) < 1 or len(line_parts) > 2:
                return (False,
                        '%s entries must contain 1 or 2 space-separated '
                        'items' % field)
            for part in line_parts:

                # deny leading slashes i.e. force absolute to relative
                # paths

                part = part.lstrip('/')
                if part.find('://') != -1:

                    # keep external targets as is - normpath breaks '://'

                    normalized_parts.append(part)
                    check_path = part.split('/')[-1]
                else:

                    # normalize path to avoid e.g. './' which breaks dir
                    # handling on resource

                    check_path = os.path.normpath(part)
                    normalized_parts.append(check_path)
                try:
                    valid_path(check_path)
                except Exception, exc:
                    return (False, 'Invalid %s part in %s: %s' %
                            (field, html_escape(part), exc))
            normalized_field.append(' '.join(normalized_parts))
        global_dict[field] = normalized_field
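# A standalone sketch of the per-part path normalization above, assuming
# os is imported as in the surrounding module; 'normalize_io_part' and
# the example host are placeholders. External targets keep their URL
# untouched and only the basename is sanity-checked, while local parts
# lose leading slashes and get normalized.
def normalize_io_part(part):
    """Mirror the per-part logic of the path normalization loop"""
    part = part.lstrip('/')
    if part.find('://') != -1:
        # external target: keep as is, only check the basename
        return (part, part.split('/')[-1])
    check_path = os.path.normpath(part)
    return (check_path, check_path)

# normalize_io_part('/home/user/./data.txt')
#   -> ('home/user/data.txt', 'home/user/data.txt')
# normalize_io_part('https://host.example.org/in.txt')
#   -> ('https://host.example.org/in.txt', 'in.txt')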
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    flags = accepted['flags']
    patterns = accepted['job_id']

    if not configuration.site_enable_jobs:
        output_objects.append(
            {'object_type': 'error_text', 'text':
             '''Job execution is not enabled on this system'''})
        return (output_objects, returnvalues.SYSTEM_ERROR)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    mrsl_keywords_dict = get_keywords_dict(configuration)

    if verbose(flags):
        for flag in flags:
            output_objects.append({'object_type': 'text',
                                   'text': '%s using flag: %s'
                                   % (op_name, flag)})

    for pattern in patterns:

        # Add file extension

        pattern += '.mRSL'

        # Check directory traversal attempts before actual handling to
        # avoid leaking information about file system layout while
        # allowing consistent error messages

        unfiltered_match = glob.glob(base_dir + pattern)
        match = []
        for server_path in unfiltered_match:
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_path = os.path.abspath(server_path)
            if not valid_user_path(configuration, abs_path, base_dir,
                                   True):

                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.

                logger.warning('%s tried to %s restricted path %s ! (%s)'
                               % (client_id, op_name, abs_path, pattern))
                continue
            match.append(abs_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match

        if not match:
            output_objects.append({'object_type': 'file_not_found',
                                   'name': pattern})
            status = returnvalues.FILE_NOT_FOUND

        for abs_path in match:
            output_lines = []
            relative_path = abs_path.replace(base_dir, '')
            try:
                mrsl_dict = unpickle(abs_path, logger)
                if not mrsl_dict:
                    raise Exception('could not load job mRSL')
                for (key, val) in mrsl_dict.items():
                    if not key in mrsl_keywords_dict.keys():
                        continue
                    if not val:
                        continue
                    output_lines.append('::%s::\n' % key)
                    if 'multiplestrings' == \
                            mrsl_keywords_dict[key]['Type']:
                        for line in val:
                            output_lines.append('%s\n' % line)
                    elif 'multiplekeyvalues' == \
                            mrsl_keywords_dict[key]['Type']:
                        for (left, right) in val:
                            output_lines.append('%s=%s\n' % (left, right))
                    else:
                        output_lines.append('%s\n' % val)
                    output_lines.append('\n')
            except Exception, exc:
                output_objects.append(
                    {'object_type': 'error_text',
                     'text': "%s: '%s': %s" % (op_name, relative_path,
                                               exc)})
                logger.error("%s: failed on '%s': %s"
                             % (op_name, relative_path, exc))
                status = returnvalues.SYSTEM_ERROR
                continue
            if verbose(flags):
                output_objects.append({'object_type': 'file_output',
                                       'path': relative_path,
                                       'lines': output_lines})
            else:
                output_objects.append({'object_type': 'file_output',
                                       'lines': output_lines})
def handle_form_input(filenumber, user_arguments_dict, configuration):
    """Get keyword_FILENUMBER_X_Y from form and put it in mRSL format or
    write plain file
    """

    file_type = ''

    output = ''
    keys = mrslkeywords.get_keywords_dict(configuration).keys()

    # FILE keyword used to indicate a plain file should be created

    keys.append('PLAINFILE')
    keys.append('FILEUPLOAD')
    for keyword in keys:
        counter_1 = -1
        counter_2 = 0
        end_with_newline = False
        while True:
            form_key = '%s_%s_%s_%s' % (keyword.lower(), filenumber,
                                        counter_1, counter_2 + 1)
            form_key_line = '%s_%s_%s_%s' % (keyword.lower(), filenumber,
                                             counter_1 + 1, counter_2)
            if user_arguments_dict.has_key(form_key):

                # Y increased, append value

                output += convert_control_value_to_line(
                    form_key, user_arguments_dict)
                counter_2 += 1
            elif user_arguments_dict.has_key(form_key_line):

                # X increased. If 0_0 write keyword. Write new line.

                if counter_1 == -1 and counter_2 == 0:
                    if keyword == 'PLAINFILE':
                        file_type = 'plain'
                    elif keyword == 'FILEUPLOAD':
                        file_type = 'fileupload'
                    else:

                        # write keyword the first time only

                        output += '::%s::\n' % keyword
                        end_with_newline = True

                output += '%s\n' % convert_control_value_to_line(
                    form_key_line, user_arguments_dict)
                counter_1 += 1
                counter_2 = 0
            else:

                # X+1 or Y+1 not found, append newline if requested

                if end_with_newline:
                    output += '\n'
                break

    return (output, file_type)
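# Illustrative walk-through, assuming convert_control_value_to_line()
# returns the submitted string unchanged: form widgets are encoded as
# '<keyword>_<filenumber>_<X>_<Y>' where X is the line index and Y a
# continuation on that line. For file number 0, a two-line EXECUTE field
# would arrive roughly as
#
#     {'execute_0_0_0': 'echo hello', 'execute_0_1_0': 'echo done'}
#
# and handle_form_input(0, user_arguments_dict, configuration) would
# return ('::EXECUTE::\necho hello\necho done\n\n', '').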
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text':
             'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    patterns = accepted['job_id']

    if not patterns:
        output_objects.append({'object_type': 'error_text',
                               'text': 'No job_id specified!'})
        return (output_objects, returnvalues.NO_SUCH_JOB_ID)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    filelist = []
    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    for pattern in patterns:
        pattern = pattern.strip()

        # Backward compatibility - all_jobs keyword should match all jobs

        if pattern == all_jobs:
            pattern = '*'

        # Check directory traversal attempts before actual handling to
        # avoid leaking information about file system layout while
        # allowing consistent error messages

        unfiltered_match = glob.glob(base_dir + pattern + '.mRSL')
        match = []
        for server_path in unfiltered_match:
            real_path = os.path.abspath(server_path)
            if not valid_user_path(real_path, base_dir, True):

                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.

                logger.warning('%s tried to %s restricted path %s ! (%s)'
                               % (client_id, op_name, real_path, pattern))
                continue

            # Insert valid job files in filelist for later treatment

            match.append(real_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match

        if not match:
            output_objects.append(
                {'object_type': 'error_text',
                 'text': '%s: You do not have any matching job IDs!'
                 % pattern})
            status = returnvalues.CLIENT_ERROR
        else:
            filelist += match

    # resubmit is hard on the server

    if len(filelist) > 100:
        output_objects.append({'object_type': 'error_text',
                               'text': 'Too many matching jobs (%s)!'
                               % len(filelist)})
        return (output_objects, returnvalues.CLIENT_ERROR)

    resubmitobjs = []
    status = returnvalues.OK
    for filepath in filelist:
        mrsl_file = filepath.replace(base_dir, '')
        job_id = mrsl_file.replace('.mRSL', '')

        # ("Resubmitting job with job_id: %s" % job_id)

        resubmitobj = {'object_type': 'resubmitobj', 'job_id': job_id}

        mrsl_dict = unpickle(filepath, logger)
        if not mrsl_dict:
            resubmitobj['message'] = "No such job: %s (%s)" % (job_id,
                                                               mrsl_file)
            status = returnvalues.CLIENT_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        resubmit_items = keywords_dict.keys()

        # loop selected keywords and create mRSL string

        resubmit_job_string = ''
        for dict_elem in resubmit_items:
            value = ''

            # Extract job value with fallback to default to support
            # optional fields

            job_value = mrsl_dict.get(dict_elem,
                                      keywords_dict[dict_elem]['Value'])
            if keywords_dict[dict_elem]['Type'].startswith(
                    'multiplekeyvalues'):
                for (elem_key, elem_val) in job_value:
                    if elem_key:
                        value += '%s=%s\n' % (str(elem_key).strip(),
                                              str(elem_val).strip())
            elif keywords_dict[dict_elem]['Type'].startswith('multiple'):
                for elem in job_value:
                    if elem:
                        value += '%s\n' % str(elem).rstrip()
            else:
                if str(job_value):
                    value += '%s\n' % str(job_value).rstrip()

            # Only insert keywords with an associated value

            if value:
                if value.rstrip() != '':
                    resubmit_job_string += '''::%s::
%s

''' % (dict_elem, value.rstrip())

        # save tempfile

        (filehandle, tempfilename) = \
            tempfile.mkstemp(dir=configuration.mig_system_files,
                             text=True)
        os.write(filehandle, resubmit_job_string)
        os.close(filehandle)

        # submit job the usual way

        (new_job_status, msg, new_job_id) = new_job(tempfilename,
                                                    client_id,
                                                    configuration,
                                                    False, True)
        if not new_job_status:
            resubmitobj['status'] = False
            resubmitobj['message'] = msg
            status = returnvalues.SYSTEM_ERROR
            resubmitobjs.append(resubmitobj)
            continue

            # o.out("Resubmit failed: %s" % msg)
            # o.reply_and_exit(o.ERROR)

        resubmitobj['status'] = True
        resubmitobj['new_job_id'] = new_job_id
        resubmitobjs.append(resubmitobj)

        # o.out("Resubmit successful: %s" % msg)
        # o.out("%s" % msg)

    output_objects.append({'object_type': 'resubmitobjs',
                           'resubmitobjs': resubmitobjs})

    return (output_objects, status)
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False,
                                  op_title=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    job_id_list = accepted['job_id']
    external_dict = mrslkeywords.get_keywords_dict(configuration)

    if not configuration.site_enable_jobs:
        output_objects.append(
            {'object_type': 'error_text', 'text':
             '''Job execution is not enabled on this system'''})
        return (output_objects, returnvalues.SYSTEM_ERROR)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    status = returnvalues.OK
    for job_id in job_id_list:

        # job = Job()

        filepath = os.path.join(base_dir, job_id)
        filepath += '.mRSL'
        (new_job_obj_status, new_job_obj) = \
            create_job_object_from_pickled_mrsl(filepath, logger,
                                                external_dict)
        if not new_job_obj_status:
            output_objects.append({'object_type': 'error_text',
                                   'text': new_job_obj})
            status = returnvalues.CLIENT_ERROR
        else:

            # return new_job_obj

            output_objects.append({'object_type': 'jobobj',
                                   'jobobj': new_job_obj})
    return (output_objects, status)
def parse(
    localfile_spaces,
    job_id,
    client_id,
    forceddestination,
    outfile='AUTOMATIC',
):
    """Parse job description and optionally write results to parsed mRSL
    file. If outfile is non-empty it is used as destination file, and the
    keyword AUTOMATIC is replaced by the default mrsl dir destination.
    """

    configuration = get_configuration_object()
    logger = configuration.logger
    client_dir = client_id_dir(client_id)

    # return a tuple (bool status, str msg). This is done because
    # cgi-scripts are not allowed to print anything before 'the first two
    # special lines' are printed

    result = parser.parse(localfile_spaces)

    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # The mRSL has the right structure; check if the types are correct too
    # and inline update the default external_dict entries with the ones
    # from the actual job specification

    (status, msg) = parser.check_types(result, external_dict,
                                       configuration)
    if not status:
        return (False, 'Parse failed (typecheck) %s' % msg)

    logger.debug('check_types updated job dict to: %s' % external_dict)

    global_dict = {}

    # Insert the parts from mrslkeywords we need in the rest of the MiG
    # system

    for (key, value_dict) in external_dict.iteritems():
        global_dict[key] = value_dict['Value']

    # We do not expand any job variables yet in order to allow any future
    # resubmits to properly expand job ID.

    vgrid_list = global_dict['VGRID']
    vgrid_access = user_vgrid_access(configuration, client_id)

    # Replace any_vgrid keyword with all allowed vgrids (at submit time!)

    try:
        any_pos = vgrid_list.index(any_vgrid)
        vgrid_list[any_pos:any_pos] = vgrid_access

        # Remove any additional any_vgrid keywords

        while any_vgrid in vgrid_list:
            vgrid_list.remove(any_vgrid)
    except ValueError:

        # No any_vgrid keywords in list - move along

        pass

    # Now validate supplied vgrids

    for vgrid_name in vgrid_list:
        if not vgrid_name in vgrid_access:
            return (False, """Failure: You must be an owner or member of
the '%s' vgrid to submit a job to it!""" % vgrid_name)

    # Fall back to default vgrid if no vgrid was supplied

    if not vgrid_list:

        # Please note that vgrid_list is a ref to the global_dict list,
        # so we must modify it and not replace it with a new list!

        vgrid_list.append(default_vgrid)

    # convert specified runtime environments to upper-case and verify they
    # actually exist

    # do not check runtime envs if the job is for ARC (submission will
    # fail later)

    if global_dict.get('JOBTYPE', 'unset') != 'arc' \
            and global_dict.has_key('RUNTIMEENVIRONMENT'):
        re_entries_uppercase = []
        for specified_re in global_dict['RUNTIMEENVIRONMENT']:
            specified_re = specified_re.upper()
            re_entries_uppercase.append(specified_re)
            if not is_runtime_environment(specified_re, configuration):
                return (False, """You have specified a non-existing
runtime environment '%s', therefore the job cannot be run on any
resources.""" % specified_re)
        if global_dict.get('MOUNT', []) != []:
            if configuration.res_default_mount_re.upper() \
                    not in re_entries_uppercase:
                re_entries_uppercase.append(
                    configuration.res_default_mount_re.upper())
        global_dict['RUNTIMEENVIRONMENT'] = re_entries_uppercase

    if global_dict.get('JOBTYPE', 'unset').lower() == 'interactive':

        # if jobtype is interactive append command to create the
        # notification file .interactivejobfinished that breaks the
        # infinite loop waiting for the interactive job to finish and
        # send output files to the MiG server

        global_dict['EXECUTE'].append('touch .interactivejobfinished')

    # put job id and name of user in the dictionary

    global_dict['JOB_ID'] = job_id
    global_dict['USER_CERT'] = client_id

    # mark job as received

    global_dict['RECEIVED_TIMESTAMP'] = time.gmtime()
    global_dict['STATUS'] = 'PARSE'

    if forceddestination:
        global_dict['FORCEDDESTINATION'] = forceddestination
        if forceddestination.has_key('UNIQUE_RESOURCE_NAME'):
            global_dict["RESOURCE"] = "%(UNIQUE_RESOURCE_NAME)s_*" % \
                forceddestination
        if forceddestination.has_key('RE_NAME'):
            re_name = forceddestination['RE_NAME']

            # verify the verifyfiles entries are not modified (otherwise
            # the RE creator could specify multiple ::VERIFYFILES::
            # keywords and give the entries other names, perhaps
            # overwriting files in the home directories of resource
            # owners executing the testprocedure)

            for verifyfile in global_dict['VERIFYFILES']:
                verifytypes = ['.status', '.stderr', '.stdout']
                found = False
                for verifytype in verifytypes:
                    if verifyfile == 'verify_runtime_env_%s%s' % \
                            (re_name, verifytype):
                        found = True
                if not found:
                    return (False, '''You are not allowed to specify the
::VERIFY:: keyword in a testprocedure, it is done automatically''')

    # normalize any path fields to be taken relative to home

    for field in ('INPUTFILES', 'OUTPUTFILES', 'EXECUTABLES',
                  'VERIFYFILES'):
        if not global_dict.has_key(field):
            continue
        normalized_field = []
        for line in global_dict[field]:
            normalized_parts = []
            line_parts = line.split(src_dst_sep)
            if len(line_parts) < 1 or len(line_parts) > 2:
                return (False,
                        '%s entries must contain 1 or 2 space-separated '
                        'items' % field)
            for part in line_parts:

                # deny leading slashes i.e. force absolute to relative
                # paths

                part = part.lstrip('/')
                if part.find('://') != -1:

                    # keep external targets as is - normpath breaks '://'

                    normalized_parts.append(part)
                    check_path = part.split('/')[-1]
                else:

                    # normalize path to avoid e.g. './' which breaks dir
                    # handling on resource

                    check_path = os.path.normpath(part)
                    normalized_parts.append(check_path)
                try:
                    valid_path(check_path)
                except Exception, exc:
                    return (False, 'Invalid %s part in %s: %s' %
                            (field, html_escape(part), exc))
            normalized_field.append(' '.join(normalized_parts))
        global_dict[field] = normalized_field