def enqueue_vm(client_id, configuration, machine_name, machine_req):
    """Submit a machine job based on machine definition file and overrides
    from machine_req.

    Returns the job submit result, a 3-tuple of (status, msg, job_id).
    """

    # Merge per-request overrides on top of the site defaults
    specs = default_vm_specs(configuration)
    specs.update(machine_req)

    # Setup paths - filter above prevents directory traversal
    client_dir = client_id_dir(client_id)
    user_home = os.path.abspath(os.path.join(configuration.user_home,
                                             client_dir))
    user_vms_home = os.path.join(user_home, vm_base)
    vm_home = os.path.join(user_vms_home, machine_name)

    # Read the colon-separated (sys_re, sys_base, sys_disk) location record.
    # Guard with try/finally so the handle is closed even if read or the
    # 3-way split fails (the original leaked the handle on error).
    location_fd = open(os.path.join(vm_home, sys_location), 'r')
    try:
        (sys_re, sys_base, sys_disk) = location_fd.read().split(':')
    finally:
        location_fd.close()

    data_disk = '%(os)s-%(vm_arch)s-data.%(disk_format)s' % specs
    run_script = 'run%(hypervisor)svm.sh' % specs
    specs.update({'name': machine_name, 'data_disk': data_disk,
                  'run_script': run_script, 'vm_base': vm_base,
                  'sys_re': sys_re, 'sys_base': sys_base,
                  'sys_disk': sys_disk})

    # Pull in any runtime envs required by the hypervisor and system images
    if specs['hypervisor_re']:
        specs['runtime_env'].append(specs['hypervisor_re'])
    if specs['sys_re']:
        specs['runtime_env'].append(specs['sys_re'])

    # Generate the mrsl and write to a temp file which is removed on close.
    # Make sure the temp file is closed (and thus removed) even if the
    # write or the job submission raises.
    mrsl = mig_vbox_deploy_job(client_id, configuration, machine_name, specs)
    mrsl_fd = NamedTemporaryFile()
    try:
        mrsl_fd.write(mrsl)
        mrsl_fd.flush()

        # Submit job - clean up happens in finally below
        res = new_job(mrsl_fd.name, client_id, configuration, False, True)
    finally:
        mrsl_fd.close()
    return res
relative_path = os.path.basename(real_path) os.write(filehandle, mrsl) os.close(filehandle) except Exception, err: output_objects.append({'object_type': 'error_text', 'text': 'Failed to write temporary mRSL file: %s' % \ err}) return (output_objects, returnvalues.SYSTEM_ERROR) # submit it submitstatuslist = [] submitstatus = {'object_type': 'submitstatus', 'name': relative_path} try: (job_status, newmsg, job_id) = new_job(real_path, client_id, configuration, False, True) except Exception, exc: logger.error("%s: failed on '%s': %s" % (op_name, relative_path, exc)) job_status = False newmsg = "%s failed on '%s' (invalid mRSL?)"\ % (op_name, relative_path) job_id = None if not job_status: # output_objects.append({"object_type":"error_text", "text":"%s" % newmsg}) submitstatus['status'] = False submitstatus['message'] = newmsg status = returnvalues.CLIENT_ERROR else:
# submit mrsl files to the parser. It should be done from within this # function to keep the right order if multiple files are created in the # html form. submitstatuslist = [] if configuration.site_enable_jobs and submit_mrslfiles: # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = \ os.path.abspath(os.path.join(configuration.user_home, client_dir)) + os.sep for mrslfile in mrslfiles_to_parse: (job_status, parse_msg, job_id) = new_job(mrslfile, client_id, configuration, False, True) relative_filename = os.sep + mrslfile.replace(base_dir, '') submitstatus = {'object_type': 'submitstatus', 'name': relative_filename} if not job_status: submitstatus['status'] = False submitstatus['job_id'] = job_id submitstatus['message'] = parse_msg else: # msg += "<h2>Failure</h2><br>\n" # msg += parse_msg # return(False, msg) submitstatus['status'] = True submitstatus['job_id'] = job_id
def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id, op_header=False) title_entry = find_entry(output_objects, 'title') title_entry['text'] = 'Runtime env support' output_objects.append({ 'object_type': 'header', 'text': 'Test runtime environment support' }) client_dir = client_id_dir(client_id) defaults = signature()[1] (validate_status, accepted) = validate_input_and_cert( user_arguments_dict, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: logger.warning('%s invalid input: %s' % (op_name, accepted)) return (accepted, returnvalues.CLIENT_ERROR) resource_list = accepted['unique_resource_name'] re_name = accepted['re_name'][-1] status = returnvalues.OK visible_res = user_visible_res_confs(configuration, client_id) if not safe_handler(configuration, 'post', op_name, client_id, get_csrf_limit(configuration), accepted): output_objects.append({ 'object_type': 'error_text', 'text': '''Only accepting CSRF-filtered POST requests to prevent unintended updates''' }) return (output_objects, returnvalues.CLIENT_ERROR) if not re_name: output_objects.append({ 'object_type': 'error_text', 'text': 'Please specify the name of the runtime environment!' 
}) return (output_objects, returnvalues.CLIENT_ERROR) if not valid_dir_input(configuration.re_home, re_name): logger.warning( "possible illegal directory traversal attempt re_name '%s'" % re_name) output_objects.append({ 'object_type': 'error_text', 'text': 'Illegal runtime environment name: "%s"' % re_name }) return (output_objects, returnvalues.CLIENT_ERROR) # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = os.path.abspath( os.path.join(configuration.user_home, client_dir)) + os.sep for visible_res_name in resource_list: if not visible_res_name in visible_res.keys(): logger.warning('User %s not allowed to view %s (%s)' % \ (client_id, visible_res_name, visible_res.keys())) output_objects.append({'object_type': 'error_text', 'text': 'invalid resource %s' % \ visible_res_name}) status = returnvalues.CLIENT_ERROR continue if not is_owner(client_id, visible_res_name, configuration.resource_home, logger): output_objects.append({ 'object_type': 'error_text', 'text': 'You must be an owner of the resource to validate runtime ' 'environment support. (resource %s)' % visible_res_name }) status = returnvalues.CLIENT_ERROR continue (re_dict, re_msg) = get_re_dict(re_name, configuration) if not re_dict: output_objects.append({ 'object_type': 'error_text', 'text': 'Could not get re_dict %s' % re_msg }) status = returnvalues.SYSTEM_ERROR continue if not testresource_has_re_specified(visible_res_name, re_name, configuration): output_objects.append({ 'object_type': 'error_text', 'text': 'You must specify the runtime environment in the resource' 'configuration before verifying if it is supported!' 
}) status = returnvalues.CLIENT_ERROR continue base64string = '' for stringpart in re_dict['TESTPROCEDURE']: base64string += stringpart mrslfile_content = base64.decodestring(base64string) try: (filehandle, mrslfile) = tempfile.mkstemp(text=True) os.write(filehandle, mrslfile_content) os.close(filehandle) create_verify_files(['status', 'stdout', 'stderr'], re_name, re_dict, base_dir, logger) except Exception, exc: output_objects.append({ 'object_type': 'error_text', 'text': 'Could not write test job for %s: %s' % (visible_res_name, exc) }) status = returnvalues.SYSTEM_ERROR continue forceddestination_dict = { 'UNIQUE_RESOURCE_NAME': visible_res_name, 'RE_NAME': re_name } (success, msg) = new_job(mrslfile, client_id, configuration, forceddestination_dict) if not success: output_objects.append({ 'object_type': 'error_text', 'text': 'Submit test job failed %s: %s' % (visible_res_name, msg) }) status = returnvalues.SYSTEM_ERROR try: os.remove(mrslfile) except: pass output_objects.append( {'object_type': 'text', 'text': 'Runtime environment test job for %s successfuly submitted! %s' \ % (visible_res_name, msg)})
# submit mrsl files to the parser. It should be done from within this # function to keep the right order if multiple files are created in the # html form. submitstatuslist = [] if submit_mrslfiles: # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = \ os.path.abspath(os.path.join(configuration.user_home, client_dir)) + os.sep for mrslfile in mrslfiles_to_parse: (job_status, parse_msg, job_id) = new_job(mrslfile, client_id, configuration, False, True) relative_filename = os.sep + mrslfile.replace(base_dir, '') submitstatus = {'object_type': 'submitstatus', 'name': relative_filename} if not job_status: submitstatus['status'] = False submitstatus['job_id'] = job_id submitstatus['message'] = parse_msg else: # msg += "<h2>Failure</h2><br>\n" # msg += parse_msg # return(False, msg) submitstatus['status'] = True submitstatus['job_id'] = job_id
def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id) client_dir = client_id_dir(client_id) status = returnvalues.OK defaults = signature()[1] (validate_status, accepted) = validate_input_and_cert( user_arguments_dict, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: return (accepted, returnvalues.CLIENT_ERROR) flags = ''.join(accepted['flags']) patterns = accepted['path'] if not safe_handler(configuration, 'post', op_name, client_id, get_csrf_limit(configuration), accepted): output_objects.append({ 'object_type': 'error_text', 'text': '''Only accepting CSRF-filtered POST requests to prevent unintended updates''' }) return (output_objects, returnvalues.CLIENT_ERROR) if not configuration.site_enable_jobs: output_objects.append({ 'object_type': 'error_text', 'text': '''Job execution is not enabled on this system''' }) return (output_objects, returnvalues.SYSTEM_ERROR) # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = os.path.abspath( os.path.join(configuration.user_home, client_dir)) + os.sep if verbose(flags): for flag in flags: output_objects.append({ 'object_type': 'text', 'text': '%s using flag: %s' % (op_name, flag) }) for pattern in patterns: # Check directory traversal attempts before actual handling to avoid # leaking information about file system layout while allowing # consistent error messages unfiltered_match = glob.glob(base_dir + pattern) match = [] for server_path in unfiltered_match: # IMPORTANT: path must be expanded to abs for proper chrooting abs_path = os.path.abspath(server_path) if not valid_user_path(configuration, abs_path, base_dir, True): # out of bounds - save user warning for later to allow # partial match: # ../*/* is technically allowed to match own files. 
logger.warning('%s tried to %s restricted path %s ! (%s)' % (client_id, op_name, abs_path, pattern)) continue match.append(abs_path) # Now actually treat list of allowed matchings and notify if no # (allowed) match if not match: output_objects.append({ 'object_type': 'file_not_found', 'name': pattern }) status = returnvalues.FILE_NOT_FOUND submitstatuslist = [] for abs_path in match: output_lines = [] relative_path = abs_path.replace(base_dir, '') submitstatus = { 'object_type': 'submitstatus', 'name': relative_path } try: (job_status, newmsg, job_id) = new_job(abs_path, client_id, configuration, False, True) except Exception, exc: logger.error("%s: failed on '%s': %s" % (op_name, relative_path, exc)) job_status = False newmsg = "%s failed on '%s' (is it a valid mRSL file?)"\ % (op_name, relative_path) job_id = None if not job_status: submitstatus['status'] = False submitstatus['message'] = newmsg status = returnvalues.CLIENT_ERROR else: submitstatus['status'] = True submitstatus['job_id'] = job_id submitstatuslist.append(submitstatus) output_objects.append({ 'object_type': 'submitstatuslist', 'submitstatuslist': submitstatuslist })
# save to temporary file try: (filehandle, real_path) = tempfile.mkstemp(text=True) relative_path = os.path.basename(real_path) os.write(filehandle, mrsl) os.close(filehandle) except Exception, err: output_objects.append({'object_type': 'error_text', 'text' : 'Failed to write temporary mRSL file: %s' % err}) return (output_objects, returnvalues.SYSTEM_ERROR) # submit it (submit_status, newmsg, job_id) = new_job(real_path, client_id, configuration, False, True) if not submit_status: output_objects.append({'object_type': 'error_text', 'text' : newmsg}) return (output_objects, returnvalues.CLIENT_ERROR) # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = \ os.path.abspath(os.path.join(configuration.mrsl_files_dir, client_dir)) + os.sep # job = Job() filepath = os.path.join(base_dir, job_id)
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    patterns = accepted['job_id']

    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append({
            'object_type': 'error_text', 'text':
            '''Only accepting CSRF-filtered POST requests to prevent unintended updates'''
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    if not configuration.site_enable_jobs:
        output_objects.append({
            'object_type': 'error_text', 'text':
            '''Job execution is not enabled on this system'''
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    if not patterns:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'No job_id specified!'
        })
        return (output_objects, returnvalues.NO_SUCH_JOB_ID)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name
    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                                     client_dir)) + os.sep

    # Initialize overall status exactly once, BEFORE the pattern loop.
    # It may be downgraded to CLIENT_ERROR below for unmatched patterns
    # and must not be reset afterwards - the original reset it to OK
    # after the loop, silently clobbering that error.
    status = returnvalues.OK
    filelist = []
    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    for pattern in patterns:
        pattern = pattern.strip()

        # Backward compatibility - all_jobs keyword should match all jobs
        if pattern == all_jobs:
            pattern = '*'

        # Check directory traversal attempts before actual handling to avoid
        # leaking information about file system layout while allowing
        # consistent error messages
        unfiltered_match = glob.glob(base_dir + pattern + '.mRSL')
        match = []
        for server_path in unfiltered_match:
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_path = os.path.abspath(server_path)
            if not valid_user_path(configuration, abs_path, base_dir, True):
                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.
                logger.warning('%s tried to %s restricted path %s ! (%s)' %
                               (client_id, op_name, abs_path, pattern))
                continue

            # Insert valid job files in filelist for later treatment
            match.append(abs_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match
        if not match:
            output_objects.append({
                'object_type': 'error_text',
                'text': '%s: You do not have any matching job IDs!' % pattern
            })
            status = returnvalues.CLIENT_ERROR
        else:
            filelist += match

    # resubmit is hard on the server
    if len(filelist) > 100:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Too many matching jobs (%s)!' % len(filelist)
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    resubmitobjs = []
    for filepath in filelist:
        # Map the pickled mRSL path back to the user-visible job ID
        mrsl_file = filepath.replace(base_dir, '')
        job_id = mrsl_file.replace('.mRSL', '')

        # ("Resubmitting job with job_id: %s" % job_id)

        resubmitobj = {'object_type': 'resubmitobj', 'job_id': job_id}

        mrsl_dict = unpickle(filepath, logger)
        if not mrsl_dict:
            resubmitobj['message'] = "No such job: %s (%s)" % (job_id,
                                                               mrsl_file)
            status = returnvalues.CLIENT_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        resubmit_items = keywords_dict.keys()

        # loop selected keywords and create mRSL string

        resubmit_job_string = ''

        for dict_elem in resubmit_items:
            value = ''

            # Extract job value with fallback to default to support optional
            # fields

            job_value = mrsl_dict.get(dict_elem,
                                      keywords_dict[dict_elem]['Value'])
            if keywords_dict[dict_elem]['Type'].startswith(
                    'multiplekeyvalues'):
                for (elem_key, elem_val) in job_value:
                    if elem_key:
                        value += '%s=%s\n' % (str(elem_key).strip(),
                                              str(elem_val).strip())
            elif keywords_dict[dict_elem]['Type'].startswith('multiple'):
                for elem in job_value:
                    if elem:
                        value += '%s\n' % str(elem).rstrip()
            else:
                if str(job_value):
                    value += '%s\n' % str(job_value).rstrip()

            # Only insert keywords with an associated value

            if value:
                if value.rstrip() != '':
                    resubmit_job_string += '''::%s::
%s

''' % (dict_elem, value.rstrip())

        # save tempfile

        (filehandle, tempfilename) = \
            tempfile.mkstemp(dir=configuration.mig_system_files,
                             text=True)
        os.write(filehandle, resubmit_job_string)
        os.close(filehandle)

        # submit job the usual way

        (new_job_status, msg, new_job_id) = new_job(tempfilename,
                                                    client_id,
                                                    configuration, False,
                                                    True)
        if not new_job_status:
            resubmitobj['status'] = False
            resubmitobj['message'] = msg
            status = returnvalues.SYSTEM_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        # o.out("Resubmit failed: %s" % msg)
        # o.reply_and_exit(o.ERROR)

        resubmitobj['status'] = True
        resubmitobj['new_job_id'] = new_job_id
        resubmitobjs.append(resubmitobj)

        # o.out("Resubmit successful: %s" % msg)
        # o.out("%s" % msg)

    output_objects.append({
        'object_type': 'resubmitobjs',
        'resubmitobjs': resubmitobjs
    })

    return (output_objects, status)
def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id, op_header=False) title_entry = find_entry(output_objects, 'title') title_entry['text'] = 'Runtime env support' output_objects.append({'object_type': 'header', 'text' : 'Test runtime environment support'}) client_dir = client_id_dir(client_id) defaults = signature()[1] (validate_status, accepted) = validate_input_and_cert( user_arguments_dict, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: logger.warning('%s invalid input: %s' % (op_name, accepted)) return (accepted, returnvalues.CLIENT_ERROR) resource_list = accepted['unique_resource_name'] re_name = accepted['re_name'][-1] status = returnvalues.OK visible_res = user_visible_res_confs(configuration, client_id) if not re_name: output_objects.append( {'object_type': 'error_text', 'text' : 'Please specify the name of the runtime environment!'}) return (output_objects, returnvalues.CLIENT_ERROR) if not valid_dir_input(configuration.re_home, re_name): logger.warning( "possible illegal directory traversal attempt re_name '%s'" % re_name) output_objects.append({'object_type': 'error_text', 'text' : 'Illegal runtime environment name: "%s"' % re_name}) return (output_objects, returnvalues.CLIENT_ERROR) # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = os.path.abspath(os.path.join(configuration.user_home, client_dir)) + os.sep for visible_res_name in resource_list: if not visible_res_name in visible_res.keys(): logger.warning('User %s not allowed to view %s (%s)' % \ (client_id, visible_res_name, visible_res.keys())) output_objects.append({'object_type': 'error_text', 'text': 'invalid resource %s' % \ visible_res_name}) status = returnvalues.CLIENT_ERROR continue if not is_owner(client_id, visible_res_name, 
configuration.resource_home, logger): output_objects.append( {'object_type': 'error_text', 'text': 'You must be an owner of the resource to validate runtime ' 'environment support. (resource %s)' % visible_res_name}) status = returnvalues.CLIENT_ERROR continue (re_dict, re_msg) = get_re_dict(re_name, configuration) if not re_dict: output_objects.append( {'object_type': 'error_text', 'text': 'Could not get re_dict %s' % re_msg}) status = returnvalues.SYSTEM_ERROR continue if not testresource_has_re_specified(visible_res_name, re_name, configuration): output_objects.append( {'object_type': 'error_text', 'text': 'You must specify the runtime environment in the resource' 'configuration before verifying if it is supported!'}) status = returnvalues.CLIENT_ERROR continue base64string = '' for stringpart in re_dict['TESTPROCEDURE']: base64string += stringpart mrslfile_content = base64.decodestring(base64string) try: (filehandle, mrslfile) = tempfile.mkstemp(text=True) os.write(filehandle, mrslfile_content) os.close(filehandle) create_verify_files(['status', 'stdout', 'stderr'], re_name, re_dict, base_dir, logger) except Exception, exc: output_objects.append( {'object_type': 'error_text', 'text': 'Could not write test job for %s: %s' % (visible_res_name, exc)}) status = returnvalues.SYSTEM_ERROR continue forceddestination_dict = {'UNIQUE_RESOURCE_NAME': visible_res_name, 'RE_NAME': re_name} (success, msg) = new_job(mrslfile, client_id, configuration, forceddestination_dict) if not success: output_objects.append( {'object_type': 'error_text', 'text': 'Submit test job failed %s: %s' % (visible_res_name, msg)}) status = returnvalues.SYSTEM_ERROR try: os.remove(mrslfile) except: pass output_objects.append( {'object_type': 'text', 'text': 'Runtime environment test job for %s successfuly submitted! %s' \ % (visible_res_name, msg)})
output_objects.append({ 'object_type': 'error_text', 'text': '%s could not be written! (%s)' % (path, str(exc).replace(base_dir, '')) }) return (output_objects, returnvalues.SYSTEM_ERROR) if submitjob: output_objects.append({ 'object_type': 'text', 'text': 'Submitting saved file to parser' }) submitstatus = {'object_type': 'submitstatus', 'name': path} (new_job_status, msg, job_id) = new_job(abs_path, client_id, configuration, False, True) if not new_job_status: submitstatus['status'] = False submitstatus['message'] = msg else: submitstatus['status'] = True submitstatus['job_id'] = job_id output_objects.append({ 'object_type': 'submitstatuslist', 'submitstatuslist': [submitstatus] }) output_objects.append({ 'object_type': 'link', 'destination': 'javascript:history.back()',
def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id, op_title=True, op_header=False) client_dir = client_id_dir(client_id) status = returnvalues.OK defaults = signature()[1] # TODO: all non-file fields should be validated!! # Input fields are mostly file stuff so do not validate it validate_args = dict([(key, user_arguments_dict.get(key, val)) for \ (key, val) in defaults.items()]) (validate_status, accepted) = validate_input_and_cert( validate_args, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: return (accepted, returnvalues.CLIENT_ERROR) if not correct_handler('POST'): output_objects.append( {'object_type': 'error_text', 'text' : 'Only accepting POST requests to prevent unintended updates'}) return (output_objects, returnvalues.CLIENT_ERROR) output_objects.append({'object_type': 'header', 'text' : '%s submit job/file' % configuration.short_title}) submitstatuslist = [] fileuploadobjs = [] filenumber = 0 file_fields = int(accepted.get('file_fields', -1)[-1]) save_as_default = (accepted['save_as_default'][-1] != 'False') # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = os.path.abspath(os.path.join(configuration.user_home, client_dir)) + os.sep mrsl = '' while True: (content, file_type) = handle_form_input(filenumber, user_arguments_dict, configuration) if not content: if filenumber < file_fields: # blank field but file_fields indicates more fields filenumber += 1 continue # no field count and no data for filenumber found break # always append mrsltextarea if available! 
try: mrsl = user_arguments_dict['mrsltextarea_%s' % filenumber][0] content += mrsl except: pass content += '\n' mrslfiles_to_parse = [] submit_mrslfiles = False submitmrsl_key = 'submitmrsl_%s' % filenumber if user_arguments_dict.has_key(submitmrsl_key): val = str(user_arguments_dict[submitmrsl_key][0]).upper() if val == 'ON' or val == 'TRUE': submit_mrslfiles = True fileuploadobj = {'object_type': 'fileuploadobj', 'submitmrsl': submit_mrslfiles} if file_type == 'plain': # get filename filename_key = 'FILENAME_%s' % filenumber if not user_arguments_dict.has_key(filename_key): output_objects.append( {'object_type': 'error_text','text' : ("The specified file_type is 'plain', but a filename" \ "value was not found. The missing control should be " \ "named %s") % filename_key}) return (output_objects, returnvalues.CLIENT_ERROR) filename_val = convert_control_value_to_line(filename_key, user_arguments_dict) if not filename_val: if filenumber < file_fields: # blank field but file_fields indicates more fields filenumber += 1 continue output_objects.append( {'object_type': 'error_text', 'text' : 'No filename found - please make sure you provide a " \ "file to upload'}) return (output_objects, returnvalues.CLIENT_ERROR) local_filename = base_dir + filename_val valid_status, valid_err = valid_user_path_name(filename_val, local_filename, base_dir) if not valid_status: output_objects.append( {'object_type': 'error_text', 'text': valid_err}) return (output_objects, returnvalues.CLIENT_ERROR) # A new filename was created, write content to file if not write_file(content, local_filename, logger): output_objects.append({'object_type': 'error_text', 'text': 'Could not write: %s' % local_filename}) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['saved'] = True # msg += "%s created!" 
% local_filename fileuploadobj['name'] = os.sep\ + convert_control_value_to_line(filename_key, user_arguments_dict) if local_filename.upper().endswith('.MRSL')\ and submit_mrslfiles: mrslfiles_to_parse.append[local_filename] elif file_type == 'fileupload': # An input type=file was found fileupload_key = 'fileupload_%s_0_0' % filenumber # if not fileitem.filename: if not user_arguments_dict.has_key(fileupload_key + 'filename'): output_objects.append({'object_type': 'error_text', 'text': 'NO FILENAME error'}) return (output_objects, returnvalues.CLIENT_ERROR) base_name = strip_dir(user_arguments_dict[fileupload_key + 'filename']) if not base_name: if filenumber < file_fields: # blank field but file_fields indicates more fields # output_objects.append({'object_type': 'text', 'text': # 'skip item %d' % filenumber}) filenumber += 1 continue output_objects.append( {'object_type': 'error_text', 'text' : 'No filename found - please make sure you provide a " \ "file to upload'}) return (output_objects, returnvalues.CLIENT_ERROR) extract_packages = False extract_key = 'extract_%s' % filenumber if user_arguments_dict.has_key(extract_key): val = str(user_arguments_dict[extract_key][0]).upper() if val == 'ON' or val == 'TRUE': extract_packages = True remote_filename = '' default_remotefilename_key = 'default_remotefilename_%s'\ % filenumber if user_arguments_dict.has_key(default_remotefilename_key): remote_filename = \ user_arguments_dict[default_remotefilename_key][0] # remotefilename overwrites default_remotefilename if it exists remotefilename_key = 'remotefilename_%s' % filenumber if user_arguments_dict.has_key(remotefilename_key): remote_filename = \ user_arguments_dict[remotefilename_key][0] if not remote_filename: remote_filename = base_name # if remote_filename is a directory, use client's local filename # for the last part of the filename if remote_filename.strip().endswith(os.sep): remote_filename += base_name if not user_arguments_dict.has_key(fileupload_key): 
output_objects.append({'object_type': 'error_text', 'text': 'File content not found!'}) return (output_objects, returnvalues.CLIENT_ERROR) local_filename = os.path.abspath(base_dir + remote_filename) valid_status, valid_err = valid_user_path_name(remote_filename, local_filename, base_dir) if not valid_status: output_objects.append( {'object_type': 'error_text', 'text': valid_err}) return (output_objects, returnvalues.CLIENT_ERROR) if not os.path.isdir(os.path.dirname(local_filename)): try: os.makedirs(os.path.dirname(local_filename), 0777) except Exception: fileuploadobj['message'] = \ {'object_type': 'error_text', 'text': 'Exception creating dirs %s'\ % os.path.dirname(local_filename)} fileuploadobj['name'] = remote_filename # reads uploaded file into memory binary = user_arguments_dict.has_key('%s_is_encoded' % fileupload_key) if binary: data = user_arguments_dict[fileupload_key][-1] data = str(base64.decodestring(data)) else: data = user_arguments_dict[fileupload_key][-1] # write file in memory to disk if not write_file(data, local_filename, configuration.logger): output_objects.append( {'object_type': 'error_text', 'text': 'Error writing file in memory to disk'}) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['saved'] = True # Tell the client about the current settings (extract and submit) # extract_str = "Extract files from packages (.zip, .tar.gz, .tgz, .tar.bz2): " # if extract_packages: # extract_str += "ON" # else: # extract_str += "OFF" # output_objects.append({"object_type":"text", "text":extract_str}) fileuploadobj['extract_packages'] = extract_packages # submit_str = "Submit mRSL files to parser (including .mRSL files in packages!): " # if submit_mrslfiles: # submit_str += "ON" # else: # submit_str += "OFF" # output_objects.append({"object_type":"text", "text":submit_str}) # handle file package if extract_packages\ and (local_filename.upper().endswith('.ZIP') or local_filename.upper().endswith('.TAR.GZ') or 
local_filename.upper().endswith('.TGZ') or local_filename.upper().endswith('.TAR.BZ2')): (status, msg) = handle_package_upload(local_filename, remote_filename, client_id, configuration, submit_mrslfiles, os.path.dirname(local_filename)) if status: if submit_mrslfiles: if isinstance(msg, basestring): output_objects.append( {'object_type': 'error_text', 'text': 'Error in submit: %s' % msg}) else: submitstatuslist = msg else: output_objects.append({'object_type': 'text', 'text': msg}) else: if submit_mrslfiles: if isinstance(msg, basestring): output_objects.append( {'object_type': 'error_text', 'text': 'Error in unpack: %s' % msg}) else: submitstatuslist = msg else: output_objects.append({'object_type': 'error_text', 'text': 'Problems unpacking: %s' % msg}) else: # output_objects.append({"object_type":"text", "text":msg}) # a "normal" (non-package) file was uploaded try: output_objects.append({'object_type': 'text', 'text' : 'File saved: %s' % remote_filename}) except Exception, err: output_objects.append({'object_type': 'error_text', 'text' : 'File seems to be saved, but could not get file size %s' % err}) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['size'] = os.path.getsize(local_filename) fileuploadobj['name'] = remote_filename # Check if the extension is .mRSL if local_filename.upper().endswith('.MRSL')\ and submit_mrslfiles: # A .mrsl file was uploaded! # output_objects.append({"object_type":"text", "text": # "File name on MiG server: %s" # % (remote_filename)}) mrslfiles_to_parse.append(local_filename) else: # mrsl file created by html controls. create filename. Loop until # a filename that do not exits is created html_generated_mrsl_dir = base_dir + 'html_generated_mrsl' if os.path.exists(html_generated_mrsl_dir)\ and not os.path.isdir(html_generated_mrsl_dir): # oops, user might have created a file with the same name output_objects.append( {'object_type': 'error_text', 'text' : 'Please make sure %s does not exist or is a directory!' 
% 'html_generated_mrsl/'}) return (output_objects, returnvalues.CLIENT_ERROR) if not os.path.isdir(html_generated_mrsl_dir): os.mkdir(html_generated_mrsl_dir) while True: time_c = time.gmtime() timestamp = '%s_%s_%s__%s_%s_%s' % ( time_c[1], time_c[2], time_c[0], time_c[3], time_c[4], time_c[5], ) local_filename = html_generated_mrsl_dir\ + '/TextAreaAt_' + timestamp + '.mRSL' if not os.path.isfile(local_filename): break # A new filename was created, write content to file if not write_file(content, local_filename, logger): output_objects.append( {'object_type': 'error_text', 'text': 'Could not write: %s' % local_filename}) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['name'] = os.sep\ + 'html_generated_mrsl/TextAreaAt_' + timestamp\ + '.mRSL' fileuploadobj['size'] = os.path.getsize(local_filename) mrslfiles_to_parse.append(local_filename) fileuploadobjs.append(fileuploadobj) # Submit selected file(s) for mrslfile in mrslfiles_to_parse: # do not reveal full path of mrsl file to client relative_filename = os.sep + mrslfile.replace(base_dir, '') submitstatus = {'object_type': 'submitstatus', 'name': relative_filename} (status, newmsg, job_id) = new_job(mrslfile, client_id, configuration, False, True) if not status: # output_objects.append({"object_type":"error_text", "text":"%s" # % newmsg}) submitstatus['status'] = False submitstatus['message'] = newmsg else: # return (output_objects, returnvalues.CLIENT_ERROR) submitstatus['status'] = True submitstatus['job_id'] = job_id # output_objects.append({"object_type":"text", "text":"%s" # % newmsg}) submitstatuslist.append(submitstatus) # prepare next loop filenumber += 1
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Resubmits one or more existing jobs selected by the job_id pattern(s) in
    user_arguments_dict. For each matching pickled mRSL job file the saved
    keyword values are serialized back into a fresh mRSL string, written to a
    temporary file and handed to new_job() for normal submission.

    Returns the usual (output_objects, returnvalue) 2-tuple.
    """

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    patterns = accepted['job_id']

    if not patterns:
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'No job_id specified!'})
        return (output_objects, returnvalues.NO_SUCH_JOB_ID)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                        client_dir)) + os.sep

    filelist = []
    keywords_dict = mrslkeywords.get_keywords_dict(configuration)
    # Start out optimistic; any per-pattern or per-job failure below demotes
    # status. Previously status was reset to OK *after* the pattern loop, so
    # a pattern without matches was silently reported as success.
    status = returnvalues.OK
    for pattern in patterns:
        pattern = pattern.strip()

        # Backward compatibility - all_jobs keyword should match all jobs

        if pattern == all_jobs:
            pattern = '*'

        # Check directory traversal attempts before actual handling to avoid
        # leaking information about file system layout while allowing
        # consistent error messages

        unfiltered_match = glob.glob(base_dir + pattern + '.mRSL')
        match = []
        for server_path in unfiltered_match:
            real_path = os.path.abspath(server_path)
            if not valid_user_path(real_path, base_dir, True):

                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.

                logger.warning('%s tried to %s restricted path %s ! (%s)'
                               % (client_id, op_name, real_path, pattern))
                continue

            # Insert valid job files in filelist for later treatment

            match.append(real_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match

        if not match:
            output_objects.append(
                {'object_type': 'error_text', 'text'
                 : '%s: You do not have any matching job IDs!' % pattern})
            status = returnvalues.CLIENT_ERROR
        else:
            filelist += match

    # resubmit is hard on the server

    if len(filelist) > 100:
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'Too many matching jobs (%s)!'
                              % len(filelist)})
        return (output_objects, returnvalues.CLIENT_ERROR)

    resubmitobjs = []
    for filepath in filelist:

        # job filename relative to base_dir doubles as the job ID

        mrsl_file = filepath.replace(base_dir, '')
        job_id = mrsl_file.replace('.mRSL', '')

        resubmitobj = {'object_type': 'resubmitobj', 'job_id': job_id}

        mrsl_dict = unpickle(filepath, logger)
        if not mrsl_dict:
            resubmitobj['message'] = "No such job: %s (%s)" % (job_id,
                                                               mrsl_file)
            status = returnvalues.CLIENT_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        resubmit_items = keywords_dict.keys()

        # loop selected keywords and create mRSL string

        resubmit_job_string = ''

        for dict_elem in resubmit_items:
            value = ''

            # Extract job value with fallback to default to support optional
            # fields

            job_value = mrsl_dict.get(dict_elem,
                                      keywords_dict[dict_elem]['Value'])
            if keywords_dict[dict_elem]['Type'].startswith(
                    'multiplekeyvalues'):
                for (elem_key, elem_val) in job_value:
                    if elem_key:
                        value += '%s=%s\n' % (str(elem_key).strip(),
                                              str(elem_val).strip())
            elif keywords_dict[dict_elem]['Type'].startswith('multiple'):
                for elem in job_value:
                    if elem:
                        value += '%s\n' % str(elem).rstrip()
            else:
                if str(job_value):
                    value += '%s\n' % str(job_value).rstrip()

            # Only insert keywords with an associated value

            if value:
                if value.rstrip() != '':
                    resubmit_job_string += '''::%s::
%s

''' % (dict_elem, value.rstrip())

        # save tempfile

        (filehandle, tempfilename) = \
            tempfile.mkstemp(dir=configuration.mig_system_files,
                             text=True)
        os.write(filehandle, resubmit_job_string)
        os.close(filehandle)

        # submit job the usual way - guard against parser blow-ups so a
        # single broken job cannot abort the remaining resubmits (matches
        # the try/except style used at the other new_job call sites)

        try:
            (new_job_status, msg, new_job_id) = new_job(tempfilename,
                    client_id, configuration, False, True)
        except Exception as exc:
            logger.error('%s: new_job failed for %s: %s' % (op_name,
                         job_id, exc))
            new_job_status = False
            msg = 'Failed to resubmit %s (invalid mRSL?)' % job_id
            new_job_id = None

        # mkstemp files are the caller's responsibility to remove; guard
        # with an existence check in case new_job already consumed it

        if os.path.exists(tempfilename):
            try:
                os.remove(tempfilename)
            except OSError as ose:
                logger.warning('%s: could not remove temp file %s: %s'
                               % (op_name, tempfilename, ose))

        if not new_job_status:
            resubmitobj['status'] = False
            resubmitobj['message'] = msg
            status = returnvalues.SYSTEM_ERROR
            resubmitobjs.append(resubmitobj)
            continue

        resubmitobj['status'] = True
        resubmitobj['new_job_id'] = new_job_id
        resubmitobjs.append(resubmitobj)

    output_objects.append({'object_type': 'resubmitobjs', 'resubmitobjs'
                          : resubmitobjs})

    return (output_objects, status)
tmpfile = None # save to temporary file try: (filehandle, real_path) = tempfile.mkstemp(text=True) os.write(filehandle, mrsl) os.close(filehandle) except Exception, err: msg = 'Failed to write temporary mRSL file: %s' % err _logger.error(msg) return (False, msg) # submit it try: (job_status, newmsg, job_id) = \ new_job(real_path, client_id, configuration, False, True) except Exception, exc: msg = "Failed to submit new job. Possible invalid mRSL?" _logger.error(msg) return (False, msg) if not job_status: _logger.error(newmsg) return (False, newmsg) return (True, job_id) def job_api_read(configuration, workflow_session, job_type=JOB,
def main(client_id, user_arguments_dict): """Main function used by front end""" (configuration, logger, output_objects, op_name) = \ initialize_main_variables(client_id, op_title=True, op_header=False) client_dir = client_id_dir(client_id) status = returnvalues.OK defaults = signature()[1] # TODO: do we need to cover more non-file fields? # All non-file fields must be validated validate_args = dict([(key, user_arguments_dict.get(key, val)) for \ (key, val) in defaults.items()]) # IMPORTANT: we must explicitly inlude CSRF token validate_args[csrf_field] = user_arguments_dict.get( csrf_field, ['AllowMe']) (validate_status, accepted) = validate_input_and_cert( validate_args, defaults, output_objects, client_id, configuration, allow_rejects=False, ) if not validate_status: return (accepted, returnvalues.CLIENT_ERROR) output_objects.append({ 'object_type': 'header', 'text': '%s file handling' % configuration.short_title }) submitstatuslist = [] fileuploadobjs = [] filenumber = 0 file_fields = int(accepted.get('file_fields', -1)[-1]) save_as_default = (accepted['save_as_default'][-1] != 'False') if not safe_handler(configuration, 'post', op_name, client_id, get_csrf_limit(configuration), accepted): output_objects.append({ 'object_type': 'error_text', 'text': '''Only accepting CSRF-filtered POST requests to prevent unintended updates''' }) return (output_objects, returnvalues.CLIENT_ERROR) # Please note that base_dir must end in slash to avoid access to other # user dirs when own name is a prefix of another user name base_dir = os.path.abspath( os.path.join(configuration.user_home, client_dir)) + os.sep mrsl = '' while True: (content, file_type) = handle_form_input(filenumber, user_arguments_dict, configuration) if not content: if filenumber < file_fields: # blank field but file_fields indicates more fields filenumber += 1 continue # no field count and no data for filenumber found break # always append mrsltextarea if available! 
try: mrsl = user_arguments_dict['mrsltextarea_%s' % filenumber][0] content += mrsl except: pass content += '\n' mrslfiles_to_parse = [] submit_mrslfiles = False submitmrsl_key = 'submitmrsl_%s' % filenumber if configuration.site_enable_jobs and \ user_arguments_dict.has_key(submitmrsl_key): val = str(user_arguments_dict[submitmrsl_key][0]).upper() if val == 'ON' or val == 'TRUE': submit_mrslfiles = True fileuploadobj = { 'object_type': 'fileuploadobj', 'submitmrsl': submit_mrslfiles } if file_type == 'plain': # get filename filename_key = 'FILENAME_%s' % filenumber if not user_arguments_dict.has_key(filename_key): output_objects.append( {'object_type': 'error_text','text' : ("The specified file_type is 'plain', but a filename" \ "value was not found. The missing control should be " \ "named %s") % filename_key}) return (output_objects, returnvalues.CLIENT_ERROR) filename_val = convert_control_value_to_line( filename_key, user_arguments_dict) if not filename_val: if filenumber < file_fields: # blank field but file_fields indicates more fields filenumber += 1 continue output_objects.append({ 'object_type': 'error_text', 'text': 'No filename found - please make sure you provide a " \ "file to upload' }) return (output_objects, returnvalues.CLIENT_ERROR) local_filename = base_dir + filename_val valid_status, valid_err = valid_user_path_name( filename_val, local_filename, base_dir) if not valid_status: output_objects.append({ 'object_type': 'error_text', 'text': valid_err }) return (output_objects, returnvalues.CLIENT_ERROR) # A new filename was created, write content to file if not write_file(content, local_filename, logger): logger.error("%s failed to write plain file %s" % \ (op_name, local_filename)) output_objects.append({ 'object_type': 'error_text', 'text': 'Could not write: %s' % local_filename }) return (output_objects, returnvalues.SYSTEM_ERROR) logger.info("%s wrote plain file %s" % (op_name, local_filename)) fileuploadobj['saved'] = True # msg += "%s 
created!" % local_filename fileuploadobj['name'] = os.sep\ + convert_control_value_to_line(filename_key, user_arguments_dict) if local_filename.upper().endswith('.MRSL')\ and submit_mrslfiles: mrslfiles_to_parse.append(local_filename) elif file_type == 'fileupload': # An input type=file was found fileupload_key = 'fileupload_%s_0_0' % filenumber # if not fileitem.filename: if not user_arguments_dict.has_key(fileupload_key + 'filename'): output_objects.append({ 'object_type': 'error_text', 'text': 'NO FILENAME error' }) return (output_objects, returnvalues.CLIENT_ERROR) base_name = strip_dir(user_arguments_dict[fileupload_key + 'filename']) if not base_name: if filenumber < file_fields: # blank field but file_fields indicates more fields # output_objects.append({'object_type': 'text', 'text': # 'skip item %d' % filenumber}) filenumber += 1 continue output_objects.append({ 'object_type': 'error_text', 'text': 'No filename found - please make sure you provide a " \ "file to upload' }) return (output_objects, returnvalues.CLIENT_ERROR) extract_packages = False extract_key = 'extract_%s' % filenumber if user_arguments_dict.has_key(extract_key): val = str(user_arguments_dict[extract_key][0]).upper() if val == 'ON' or val == 'TRUE': extract_packages = True remote_filename = '' default_remotefilename_key = 'default_remotefilename_%s'\ % filenumber if user_arguments_dict.has_key(default_remotefilename_key): remote_filename = \ user_arguments_dict[default_remotefilename_key][0] # remotefilename overwrites default_remotefilename if it exists remotefilename_key = 'remotefilename_%s' % filenumber if user_arguments_dict.has_key(remotefilename_key): remote_filename = \ user_arguments_dict[remotefilename_key][0] if not remote_filename: remote_filename = base_name # if remote_filename is a directory, use client's local filename # for the last part of the filename if remote_filename.strip().endswith(os.sep): remote_filename += base_name if not 
user_arguments_dict.has_key(fileupload_key): output_objects.append({ 'object_type': 'error_text', 'text': 'File content not found!' }) return (output_objects, returnvalues.CLIENT_ERROR) local_filename = os.path.abspath(base_dir + remote_filename) valid_status, valid_err = valid_user_path_name( remote_filename, local_filename, base_dir) if not valid_status: output_objects.append({ 'object_type': 'error_text', 'text': valid_err }) return (output_objects, returnvalues.CLIENT_ERROR) if not os.path.isdir(os.path.dirname(local_filename)): try: os.makedirs(os.path.dirname(local_filename), 0775) except Exception: fileuploadobj['message'] = \ {'object_type': 'error_text', 'text': 'Exception creating dirs %s'\ % os.path.dirname(local_filename)} fileuploadobj['name'] = remote_filename # reads uploaded file into memory binary = user_arguments_dict.has_key('%s_is_encoded' % fileupload_key) if binary: data = user_arguments_dict[fileupload_key][-1] data = str(base64.decodestring(data)) else: data = user_arguments_dict[fileupload_key][-1] # write file in memory to disk if not write_file(data, local_filename, configuration.logger): logger.error("%s failed to write upload file %s" % \ (op_name, local_filename)) output_objects.append({ 'object_type': 'error_text', 'text': 'Error writing file in memory to disk' }) return (output_objects, returnvalues.SYSTEM_ERROR) logger.info("%s wrote upload file %s" % (op_name, local_filename)) fileuploadobj['saved'] = True # Tell the client about the current settings (extract and submit) # extract_str = "Extract files from packages (.zip, .tar.gz, .tgz, .tar.bz2): " # if extract_packages: # extract_str += "ON" # else: # extract_str += "OFF" # output_objects.append({"object_type":"text", "text":extract_str}) fileuploadobj['extract_packages'] = extract_packages # submit_str = "Submit mRSL files to parser (including .mRSL files in packages!): " # if submit_mrslfiles: # submit_str += "ON" # else: # submit_str += "OFF" # 
output_objects.append({"object_type":"text", "text":submit_str}) # handle file package if extract_packages\ and (local_filename.upper().endswith('.ZIP') or local_filename.upper().endswith('.TAR.GZ') or local_filename.upper().endswith('.TGZ') or local_filename.upper().endswith('.TAR.BZ2')): (upload_status, msg) = handle_package_upload(local_filename, remote_filename, client_id, configuration, submit_mrslfiles, os.path.dirname(local_filename)) if upload_status: if submit_mrslfiles: if isinstance(msg, basestring): output_objects.append({ 'object_type': 'error_text', 'text': 'Error in submit: %s' % msg }) else: submitstatuslist = msg else: output_objects.append({ 'object_type': 'text', 'text': msg }) else: if submit_mrslfiles: if isinstance(msg, basestring): output_objects.append({ 'object_type': 'error_text', 'text': 'Error in unpack: %s' % msg }) else: submitstatuslist = msg else: output_objects.append({ 'object_type': 'error_text', 'text': 'Problems unpacking: %s' % msg }) else: # output_objects.append({"object_type":"text", "text":msg}) # a "normal" (non-package) file was uploaded try: output_objects.append({ 'object_type': 'text', 'text': 'File saved: %s' % remote_filename }) except Exception, err: output_objects.append({ 'object_type': 'error_text', 'text': 'File seems to be saved, but could not get file size %s' % err }) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['size'] = os.path.getsize(local_filename) fileuploadobj['name'] = remote_filename # Check if the extension is .mRSL if local_filename.upper().endswith('.MRSL')\ and submit_mrslfiles: # A .mrsl file was uploaded! # output_objects.append({"object_type":"text", "text": # "File name on MiG server: %s" # % (remote_filename)}) mrslfiles_to_parse.append(local_filename) else: # mrsl file created by html controls. create filename. 
Loop until # a filename that do not exits is created html_generated_mrsl_dir = base_dir + 'html_generated_mrsl' if os.path.exists(html_generated_mrsl_dir)\ and not os.path.isdir(html_generated_mrsl_dir): # oops, user might have created a file with the same name output_objects.append({ 'object_type': 'error_text', 'text': 'Please make sure %s does not exist or is a directory!' % 'html_generated_mrsl/' }) return (output_objects, returnvalues.CLIENT_ERROR) if not os.path.isdir(html_generated_mrsl_dir): os.mkdir(html_generated_mrsl_dir) while True: time_c = time.gmtime() timestamp = '%s_%s_%s__%s_%s_%s' % ( time_c[1], time_c[2], time_c[0], time_c[3], time_c[4], time_c[5], ) local_filename = html_generated_mrsl_dir\ + '/TextAreaAt_' + timestamp + '.mRSL' if not os.path.isfile(local_filename): break # A new filename was created, write content to file if not write_file(content, local_filename, logger): output_objects.append({ 'object_type': 'error_text', 'text': 'Could not write: %s' % local_filename }) return (output_objects, returnvalues.SYSTEM_ERROR) fileuploadobj['name'] = os.sep\ + 'html_generated_mrsl/TextAreaAt_' + timestamp\ + '.mRSL' fileuploadobj['size'] = os.path.getsize(local_filename) mrslfiles_to_parse.append(local_filename) fileuploadobjs.append(fileuploadobj) # Submit selected file(s) for mrslfile in mrslfiles_to_parse: # do not reveal full path of mrsl file to client relative_filename = os.sep + mrslfile.replace(base_dir, '') submitstatus = { 'object_type': 'submitstatus', 'name': relative_filename } (new_status, newmsg, job_id) = new_job(mrslfile, client_id, configuration, False, True) if not new_status: # output_objects.append({"object_type":"error_text", "text":"%s" # % newmsg}) submitstatus['status'] = False submitstatus['message'] = newmsg else: # return (output_objects, returnvalues.CLIENT_ERROR) submitstatus['status'] = True submitstatus['job_id'] = job_id # output_objects.append({"object_type":"text", "text":"%s" # % newmsg}) 
submitstatuslist.append(submitstatus) # prepare next loop filenumber += 1