def dynam_load_ftmgmt(self, filetype, filepat=None):
    """ Dynamically load a filetype mgmt class """
    #print " REG DYNLOAD"
    #if miscutils.fwdebug_check(1, 'FILEMGMT_DEBUG'):
    #    miscutils.fwdebug_print("LOADING filetype = %s" % self.filetype)

    if self.ftmgmt is None or self.filetype is None or filetype != self.filetype:
        #print " REG DYNLOAD LOAD %s" % filetype
        classname = 'filemgmt.ftmgmt_generic.FtMgmtGeneric'
        if filetype in self.config['filetype_metadata']:
            if 'filetype_mgmt' in self.config['filetype_metadata'][filetype] and \
               self.config['filetype_metadata'][filetype]['filetype_mgmt'] is not None:
                classname = self.config['filetype_metadata'][filetype]['filetype_mgmt']
        else:
            miscutils.fwdie(f'Error: Invalid filetype ({filetype})', 1)

        # dynamically load class for the filetype
        filetype_mgmt = None
        filetype_mgmt_class = miscutils.dynamically_load_class(classname)
        try:
            filetype_mgmt = filetype_mgmt_class(filetype, self, self.config, filepat)
        except Exception as err:
            print(f"ERROR\nError: creating filemgmt object\n{err}")
            raise

        self.filetype = filetype
        self.filepat = filepat
        self.ftmgmt = filetype_mgmt

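# Usage sketch (illustrative only; 'fmgmt' stands for an instance of the class this
# method belongs to, and the filetype names are made up).  The method caches the
# loaded management object, so repeated calls with the same filetype reuse
# self.ftmgmt; only a change of filetype triggers a new dynamic load.
#
#     fmgmt.dynam_load_ftmgmt('cat_finalcut')   # loads class named in filetype_metadata,
#                                               # or FtMgmtGeneric if none is configured
#     fmgmt.dynam_load_ftmgmt('cat_finalcut')   # cached: self.ftmgmt is reused
#     fmgmt.dynam_load_ftmgmt('red_immask')     # different filetype: class is loaded again
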
def __init__(self, des_services, des_http_section, numtries=5, secondsBetweenRetries=30):
    """Get password for curl and initialize existing_directories variable.

    >>> C = HttpUtils('test_http_utils/.desservices.ini', 'file-http')
    >>> len(C.curl_password)
    25
    """
    try:
        # Parse the .desservices.ini file:
        self.auth_params = serviceaccess.parse(des_services, des_http_section)

        # Create the user/password switch:
        self.curl_password = f"{self.auth_params['user']}:{self.auth_params['passwd']}"
    except Exception as err:
        miscutils.fwdie(f"Unable to get curl password ({err})", fmdefs.FM_EXIT_FAILURE)

    self.curl = pycurl.Curl()
    self.curl.setopt(pycurl.USERPWD, self.curl_password)
    self.existing_directories = set()
    self.numtries = numtries
    self.src = None
    self.dst = None
    self.filesize = None
    self.secondsBetweenRetries = secondsBetweenRetries

def get_config_vals(archive_info, config, keylist):
    """ Search given dicts for specific values

    Parameters
    ----------
    archive_info : dict
        Dictionary of the archive data

    config : dict
        Dictionary of the config data

    keylist : dict
        Dictionary of the items to locate in the dictionaries

    Returns
    -------
    dict
        The requested values
    """
    info = {}
    for k, stat in keylist.items():
        if archive_info is not None and k in archive_info:
            info[k] = archive_info[k]
        elif config is not None and k in config:
            info[k] = config[k]
        elif stat.lower() == fmdefs.REQUIRED:
            miscutils.fwdebug_print('******************************')
            miscutils.fwdebug_print('keylist = %s' % keylist)
            miscutils.fwdebug_print('archive_info = %s' % archive_info)
            miscutils.fwdebug_print('config = %s' % config)
            miscutils.fwdie('Error: Could not find required key (%s)' % k, 1, 2)
    return info

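# Usage sketch for get_config_vals(), with made-up keys and values.  Lookups prefer
# archive_info over config; a key marked fmdefs.REQUIRED that is found in neither
# dict aborts via fwdie(), while any other marker is treated as optional and simply
# left out of the result.
#
#     archive_info = {'root': '/archive/prod'}
#     config = {'root': '/ignored', 'endpoint': 'desar2'}
#     keylist = {'root': fmdefs.REQUIRED,   # satisfied by archive_info
#                'endpoint': 'opt',         # falls through to config
#                'timeout': 'opt'}          # missing everywhere, silently skipped
#     info = get_config_vals(archive_info, config, keylist)
#     # info == {'root': '/archive/prod', 'endpoint': 'desar2'}
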
def save_file_info(filemgmt, task_id, ftype, filelist):
    """ Save file metadata and contents """
    # filelist = list of file dicts

    # check which files already have metadata in database
    # don't bother with updating existing data, as files should be immutable
    misslist = list_missing_metadata(filemgmt, ftype, filelist)

    if misslist:
        print(f"\tSaving file metadata/contents on {len(misslist):0d} files....", flush=True)
        starttime = time.time()
        try:
            filemgmt.register_file_data(ftype, misslist, None, task_id, False, None, None)
        except fmerrors.RequiredMetadataMissingError as err:
            miscutils.fwdie(f"Error: {err}", 1)
        endtime = time.time()
        print(f"DONE ({endtime - starttime:0.2f} secs)", flush=True)

    # check which files already have contents in database
    # don't bother with updating existing data, as files should be immutable
    misslist = list_missing_contents(filemgmt, ftype, filelist)

    if misslist:
        print(f"\tSaving file contents on {len(misslist):0d} files....", flush=True)
        starttime = time.time()
        filemgmt.ingest_contents(ftype, misslist)
        endtime = time.time()
        print(f"DONE ({endtime - starttime:0.2f} secs)", flush=True)

def get_config_vals(archive_info, config, keylist):
    """ Search given dicts for specific values

    Parameters
    ----------
    archive_info : dict
        Dictionary of the archive data

    config : dict
        Dictionary of the config data

    keylist : dict
        Dictionary of the keys to be searched for and whether they are
        required or optional

    Returns
    -------
    dict
        The search results
    """
    info = {}
    for k, st in keylist.items():
        if archive_info is not None and k in archive_info:
            info[k] = archive_info[k]
        elif config is not None and k in config:
            info[k] = config[k]
        elif st.lower() == 'req':
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', '******************************')
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'keylist = %s' % keylist)
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'archive_info = %s' % archive_info)
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'config = %s' % config)
            miscutils.fwdie('Error: Could not find required key (%s)' % k, 1, 2)
    return info

def get_file_disk_info_path(path, save_md5sum=False):
    """ Returns information about files on disk from given path

    Parameters
    ----------
    path : str
        String of the path to probe

    save_md5sum : bool
        Whether to calculate the md5sum (True) or not (False), default is False

    Returns
    -------
    dict
        The resulting data
    """
    # if relative path, is treated relative to current directory
    if not os.path.exists(path):
        miscutils.fwdie("Error: path does not exist (%s)" % (path), 1)

    fileinfo = {}
    for (dirpath, _, filenames) in os.walk(path):
        for name in filenames:
            fname = os.path.join(dirpath, name)
            fileinfo[fname] = get_single_file_disk_info(fname, save_md5sum)
    return fileinfo

def run_sys_checks():
    """ Check valid system environment (e.g., condor setup) """

    ### Check for Condor in path as well as daemons running
    print('\tChecking for Condor....', end='', flush=True)
    max_tries = 5
    try_delay = 60   # seconds

    trycnt = 0
    done = False
    while not done and trycnt < max_tries:
        try:
            trycnt += 1
            pfwcondor.check_condor('7.4.0')
            done = True
        except pfwcondor.CondorException as excpt:
            print("ERROR")
            print(str(excpt))
            if trycnt < max_tries:
                print("\nSleeping and then retrying")
                time.sleep(try_delay)
        except Exception as excpt:
            print("ERROR")
            raise excpt

    if not done and trycnt >= max_tries:
        miscutils.fwdie("Too many errors. Aborting.", pfwdefs.PF_EXIT_FAILURE)

    print("DONE")

def read_fullnames_from_listfile(listfile, linefmt, colstr):
    """ Read a list file returning fullnames from the list """
    if miscutils.fwdebug_check(3, 'INTGMISC_DEBUG'):
        miscutils.fwdebug_print('colstr=%s' % colstr)

    columns = convert_col_string_to_list(colstr, False)

    if miscutils.fwdebug_check(3, 'INTGMISC_DEBUG'):
        miscutils.fwdebug_print('columns=%s' % columns)

    fullnames = {}
    pos2fsect = {}
    for pos in range(0, len(columns)):
        lcol = columns[pos].lower()
        if lcol.endswith('.fullname'):
            filesect = lcol[:-9]
            pos2fsect[pos] = filesect
            fullnames[filesect] = []
        # else a data column instead of a filename

    if miscutils.fwdebug_check(3, 'INTGMISC_DEBUG'):
        miscutils.fwdebug_print('pos2fsect=%s' % pos2fsect)

    if linefmt == 'config' or linefmt == 'wcl':
        miscutils.fwdie('Error: wcl list format not currently supported (%s)' % listfile, 1)
    else:
        with open(listfile, 'r') as listfh:
            for line in listfh:
                line = line.strip()

                # convert line into python list
                lineinfo = []
                if linefmt == 'textcsv':
                    lineinfo = miscutils.fwsplit(line, ',')
                elif linefmt == 'texttab':
                    lineinfo = miscutils.fwsplit(line, '\t')
                elif linefmt == 'textsp':
                    lineinfo = miscutils.fwsplit(line, ' ')
                else:
                    miscutils.fwdie('Error: unknown linefmt (%s)' % linefmt, 1)

                # save each fullname in line
                for pos in pos2fsect:
                    # use common routine to parse actual fullname (e.g., remove [0])
                    parsemask = miscutils.CU_PARSE_PATH | miscutils.CU_PARSE_FILENAME | \
                                miscutils.CU_PARSE_COMPRESSION
                    (path, filename, compression) = miscutils.parse_fullname(lineinfo[pos],
                                                                             parsemask)
                    fname = "%s/%s" % (path, filename)
                    if compression is not None:
                        fname += compression
                    fullnames[pos2fsect[pos]].append(fname)

    if miscutils.fwdebug_check(6, 'INTGMISC_DEBUG'):
        miscutils.fwdebug_print('fullnames = %s' % fullnames)
    return fullnames

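# Illustrative list-file contents for read_fullnames_from_listfile() with
# linefmt='textcsv' and colstr='red_immask.fullname,ccdnum' (section and file names
# are made up, and convert_col_string_to_list is assumed to split on commas).
# Only columns ending in '.fullname' are collected; other columns are ignored.
#
#     /archive/run1/D00233601_g_c01_immasked.fits[0],1
#     /archive/run1/D00233601_g_c02_immasked.fits[0],2
#
# would return (parse_fullname strips the "[0]" suffix, per the comment above):
#
#     {'red_immask': ['/archive/run1/D00233601_g_c01_immasked.fits',
#                     '/archive/run1/D00233601_g_c02_immasked.fits']}
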
def get_file_disk_info(arg):
    """ Returns information about files on disk from given list or path """
    if isinstance(arg, list):
        return get_file_disk_info_list(arg)
    if isinstance(arg, str):
        return get_file_disk_info_path(arg)

    miscutils.fwdie(f"Error: argument to get_file_disk_info isn't a list or a path ({type(arg)})", 1)

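# Dispatch sketch (paths are illustrative only): a list of fullnames goes to
# get_file_disk_info_list(), a directory path to get_file_disk_info_path(); any
# other argument type aborts via fwdie().
#
#     get_file_disk_info(['/data/a.fits', '/data/b.fits'])   # per-file info dict
#     get_file_disk_info('/data')                            # walks the directory tree
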
def get_list_filenames(path):
    """ create a list of files in given path """
    if not os.path.exists(path):
        miscutils.fwdie("Error: could not find path: %s" % path, 1)

    filelist = []
    for (dirpath, dirnames, filenames) in os.walk(path):
        for fname in filenames:
            filelist.append(dirpath + '/' + fname)
    return filelist

def __init__(self, homeinfo, targetinfo, mvmtinfo, tstats, config=None):
    self.home = homeinfo
    self.target = targetinfo
    self.mvmt = mvmtinfo
    self.config = config
    self.tstats = tstats

    for x in (DES_SERVICES, DES_HTTP_SECTION):
        if x not in self.config:
            miscutils.fwdie('Error: Missing %s in config' % x, 1)

    self.HU = http_utils.HttpUtils(self.config[DES_SERVICES],
                                   self.config[DES_HTTP_SECTION])

def get_param_info(self, keys, opts=None):
    """ returns values for given list of keys """
    info = {}
    for key, stat in keys.items():
        (found, value) = self.search(key, opts)
        if found:
            info[key] = value
        else:
            if stat.lower() == 'req':
                miscutils.fwdie("Error: Config does not contain value for %s" % key,
                                pfwdefs.PF_EXIT_FAILURE, 2)
    return info

def __init__(self, homeinfo, targetinfo, mvmtinfo, tstats, config=None):
    """ initialize object """
    self.home = homeinfo
    self.target = targetinfo
    self.mvmt = mvmtinfo
    self.config = config
    self.tstats = tstats

    for reqkey in (DES_SERVICES, DES_HTTP_SECTION):
        if reqkey not in self.config:
            miscutils.fwdie('Error: Missing %s in config' % reqkey, 1)

    self.HU = http_utils.HttpUtils(self.config[DES_SERVICES],
                                   self.config[DES_HTTP_SECTION])

def __init__(self, initvals=None, fullconfig=None):
    if not miscutils.use_db(initvals):
        miscutils.fwdie("Error: FileMgmtDB class requires DB but was told not to use DB", 1)

    self.desservices = None
    if 'des_services' in initvals:
        self.desservices = initvals['des_services']

    self.section = None
    if 'des_db_section' in initvals:
        self.section = initvals['des_db_section']
    elif 'section' in initvals:
        self.section = initvals['section']

    if 'threaded' in initvals:
        self.threaded = initvals['threaded']

    have_connect = False
    if 'connection' in initvals:
        try:
            desdmdbi.DesDmDbi.__init__(self, connection=initvals['connection'])
            have_connect = True
        except:
            miscutils.fwdebug_print('Could not connect to DB using transferred connection, falling back to new connection.')

    if not have_connect:
        try:
            desdmdbi.DesDmDbi.__init__(self, self.desservices, self.section)
        except Exception as err:
            miscutils.fwdie((f"Error: problem connecting to database: {err}\n" +
                             "\tCheck desservices file and environment variables"), 1)

    # precedence - db, file, params
    self.config = WCL()
    if miscutils.checkTrue('get_db_config', initvals, False):
        self._get_config_from_db()

    if 'wclfile' in initvals and initvals['wclfile'] is not None:
        fileconfig = WCL()
        with open(initvals['wclfile'], 'r') as infh:
            fileconfig.read(infh)
        self.config.update(fileconfig)

    if fullconfig is not None:
        self.config.update(fullconfig)
    self.config.update(initvals)

    self.filetype = None
    self.ftmgmt = None
    self.filepat = None

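# Illustrative initvals for the constructor above.  The dict must satisfy
# miscutils.use_db() (the exact key it inspects is not shown in this snippet), and
# must name either an existing DB connection ('connection') or a desservices file
# and section from which one can be opened.  File and section values are made up.
#
#     initvals = {'des_services': '/home/user/.desservices.ini',
#                 'des_db_section': 'db-test',
#                 'get_db_config': True,
#                 'wclfile': None}
#     fmdb = FileMgmtDB(initvals)
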
def get_file_disk_info_path(path, save_md5sum=False):
    """ Returns information about files on disk from given path """
    # if relative path, is treated relative to current directory
    if not os.path.exists(path):
        miscutils.fwdie(f"Error: path does not exist ({path})", 1)

    fileinfo = {}
    for (dirpath, _, filenames) in os.walk(path):
        for name in filenames:
            fname = os.path.join(dirpath, name)
            fileinfo[fname] = get_single_file_disk_info(fname, save_md5sum)
    return fileinfo

def __init__(self, des_services, des_http_section, destination):
    try:
        # Parse the .desservices.ini file:
        self.auth_params = serviceaccess.parse(des_services, des_http_section)

        # set up the connection
        self.webdav = webdav.connect(destination,
                                     username=self.auth_params['user'],
                                     password=self.auth_params['passwd'])
    except Exception as err:
        miscutils.fwdie("Unable to get curl password (%s)" % err, fmdefs.FM_EXIT_FAILURE)

    self.existing_directories = set()

def get_file_archive_info_path(self, path, arname, compress_order=fmdefs.FM_PREFER_COMPRESSED):
    """ Return information about file stored in archive
        (e.g., filename, size, rel_filename, ...) """

    # sanity checks
    if 'archive' not in self.config:
        miscutils.fwdie('Error: Missing archive section in config', 1)
    if arname not in self.config['archive']:
        miscutils.fwdie(f'Error: Invalid archive name ({arname})', 1)
    if 'root' not in self.config['archive'][arname]:
        miscutils.fwdie(f"Error: Missing root in archive def ({self.config['archive'][arname]})", 1)
    if not isinstance(compress_order, list):
        miscutils.fwdie('Error: Invalid compress_order. '
                        'It must be a list of compression extensions (including None)', 1)

    likestr = self.get_regex_clause('path', f'{path}/.*')

    # query DB getting all files regardless of compression
    sql = ("select filetype,file_archive_info.* from desfile, file_archive_info " +
           f"where archive_name='{arname}' and desfile.id=file_archive_info.desfile_id " +
           f"and {likestr}")
    curs = self.cursor()
    curs.execute(sql)
    desc = [d[0].lower() for d in curs.description]

    fullnames = {}
    for cmpord in compress_order:
        fullnames[cmpord] = {}

    list_by_name = {}
    for line in curs:
        ldict = dict(zip(desc, line))
        #print "line = ", line
        if ldict['compression'] is None:
            compext = ""
        else:
            compext = ldict['compression']
        ldict['rel_filename'] = f"{ldict['path']}/{ldict['filename']}{compext}"
        fullnames[ldict['compression']][ldict['filename']] = ldict
        list_by_name[ldict['filename']] = True

    #print "uncompressed:", len(fullnames[None])
    #print "compressed:", len(fullnames['.fz'])

    # go through given list of filenames and find archive location and compression
    archiveinfo = {}
    for name in list_by_name.keys():
        #print name
        for cmpord in compress_order:    # follow compression preference
            #print "cmpord = ", cmpord
            if name in fullnames[cmpord]:
                archiveinfo[name] = fullnames[cmpord][name]
                break

    #print "archiveinfo = ", archiveinfo
    return archiveinfo

def remove_column_format(columns):
    """ Return columns minus any formatting specification """
    columns2 = []
    for col in columns:
        if col.startswith('$FMT{'):
            rmatch = re.match(r'\$FMT\{\s*([^,]+)\s*,\s*(\S+)\s*\}', col)
            if rmatch:
                columns2.append(rmatch.group(2).strip())
            else:
                miscutils.fwdie("Error: invalid FMT column: %s" % (col), 1)
        else:
            columns2.append(col)
    return columns2

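# Example for remove_column_format() (column names are illustrative): a
# '$FMT{format, name}' entry keeps only the column name, while plain columns pass
# through unchanged.
#
#     remove_column_format(['ra', '$FMT{%9.5f, dec}', 'band'])
#     # -> ['ra', 'dec', 'band']
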
def blocking_transfer(self, filelist):
    """ Do a blocking transfer

    Parameters
    ----------
    filelist : dict
        Dictionary of the files to be transferred

    Returns
    -------
    Transfer results
    """
    #print "blocking_transfer"
    #print "\tfilelist: ", filelist

    srcroot = self.src_archive_info['root']
    dstroot = self.dst_archive_info['root']

    files2copy = copy.deepcopy(filelist)
    for fname, _ in filelist.items():
        files2copy[fname]['src'] = "%s/%s" % (srcroot, files2copy[fname]['src'])
        files2copy[fname]['dst'] = "%s/%s" % (dstroot, files2copy[fname]['dst'])

    credfile = None
    if X509_USER_PROXY in self.config:
        credfile = self.config[X509_USER_PROXY]
    elif 'X509_USER_PROXY' in os.environ:
        credfile = os.environ['X509_USER_PROXY']

    if credfile is None:
        miscutils.fwdie('Error: Cannot determine location of X509 proxy. '
                        'Either set in config or environment.', 1)

    proxy_valid_hrs = 12
    if PROXY_VALID_HRS in self.config:
        proxy_valid_hrs = self.config[PROXY_VALID_HRS]

    if GO_USER not in self.config:
        miscutils.fwdie('Error: Missing %s in config' % GO_USER, 1)

    goclient = globonline.DESGlobusOnline(self.src_archive_info, self.dst_archive_info,
                                          credfile, self.config[GO_USER], proxy_valid_hrs)

    return goclient.blocking_transfer(files2copy)

def get_file_archive_info(self, filelist, arname, compress_order=fmdefs.FM_PREFER_COMPRESSED):
    """ Return archive location info for the given filenames by walking the archive on disk """

    # sanity checks
    if 'archive' not in self.config:
        miscutils.fwdie('Error: Missing archive section in config', 1)
    if arname not in self.config['archive']:
        miscutils.fwdie(f'Error: Invalid archive name ({arname})', 1)
    if 'root' not in self.config['archive'][arname]:
        miscutils.fwdie(
            f"Error: Missing root in archive def ({self.config['archive'][arname]})", 1)
    if not isinstance(compress_order, list):
        miscutils.fwdie(
            'Error: Invalid compress_order. It must be a list of compression extensions (including None)', 1)

    # walk archive to get all files
    fullnames = {}
    for p in compress_order:
        fullnames[p] = {}

    root = self.config['archive'][arname]['root']
    root = root.rstrip("/")   # canonicalize - remove trailing / to ensure consistent relative paths

    for (dirpath, _, filenames) in os.walk(root, followlinks=True):
        for fname in filenames:
            d = {}
            (d['filename'], d['compression']) = miscutils.parse_fullname(fname, 3)
            d['filesize'] = os.path.getsize(f"{dirpath}/{fname}")
            d['path'] = dirpath[len(root) + 1:]
            if d['compression'] is None:
                compext = ""
            else:
                compext = d['compression']
            d['rel_filename'] = f"{d['path']}/{d['filename']}{compext}"
            fullnames[d['compression']][d['filename']] = d

    print("uncompressed:", len(fullnames[None]))
    print("compressed:", len(fullnames['.fz']))

    # go through given list of filenames and find archive location and compression
    archiveinfo = {}
    for name in filelist:
        #print name
        for p in compress_order:    # follow compression preference
            #print "p = ", p
            if name in fullnames[p]:
                archiveinfo[name] = fullnames[p][name]
                break

    print("archiveinfo = ", archiveinfo)
    return archiveinfo

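# compress_order expresses a preference, not a filter: for each filename the first
# matching compression extension wins.  With the default fmdefs.FM_PREFER_COMPRESSED
# (presumably something like ['.fz', None]), a file present both as name.fits.fz
# and name.fits is reported with its compressed entry.  To prefer the uncompressed
# copy instead (archive name below is made up):
#
#     archinfo = self.get_file_archive_info(['D00233601_g_c01.fits'],
#                                           'desar2home',
#                                           compress_order=[None, '.fz'])
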
def create_copy_items(srchdu, metastatus, file_header_names):
    """ Create the update wcl for headers that should be copied from another header """

    updateDict = collections.OrderedDict()
    for name in file_header_names:
        if metastatus == fmdefs.META_REQUIRED:
            updateDict[name] = f"$REQCOPY{{{name.upper()}:{srchdu}}}"
        elif metastatus == fmdefs.META_OPTIONAL:
            updateDict[name] = f"$OPTCOPY{{{name.upper()}:{srchdu}}}"
        else:
            miscutils.fwdie(f'Error: Unknown metadata metastatus ({metastatus})',
                            metadefs.MD_EXIT_FAILURE)

    return updateDict

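# Example output of create_copy_items() (header names and HDU are made up).
# For srchdu='primary', metastatus=fmdefs.META_REQUIRED and
# file_header_names=['exptime', 'band'], the returned update WCL is:
#
#     {'exptime': '$REQCOPY{EXPTIME:primary}',
#      'band': '$REQCOPY{BAND:primary}'}
#
# META_OPTIONAL produces the same entries with $OPTCOPY instead of $REQCOPY.
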
def get_config_vals(archive_info, config, keylist):
    """ Search given dicts for specific values """
    info = {}
    for k, stat in keylist.items():
        if archive_info is not None and k in archive_info:
            info[k] = archive_info[k]
        elif config is not None and k in config:
            info[k] = config[k]
        elif stat.lower() == 'req':
            miscutils.fwdebug_print('******************************')
            miscutils.fwdebug_print(f'keylist = {keylist}')
            miscutils.fwdebug_print(f'archive_info = {archive_info}')
            miscutils.fwdebug_print(f'config = {config}')
            miscutils.fwdie(f'Error: Could not find required key ({k})', 1, 2)
    return info

def __init__(self, homeinfo, targetinfo, mvmtinfo, tstats, config=None):  #pylint: disable=unused-argument
    self.home = homeinfo
    self.target = targetinfo
    self.config = config
    self.tstats = tstats

    m = re.match(r"(http://[^/]+)(/.*)", homeinfo['root_http'])
    dest = m.group(1)

    for x in (DES_SERVICES, DES_HTTP_SECTION):
        if x not in self.config:
            miscutils.fwdie('Error: Missing %s in config' % x, 1)

    self.HU = ewd_utils.EwdUtils(self.config[DES_SERVICES],
                                 self.config[DES_HTTP_SECTION],
                                 dest.replace('http://', ''))

def get_list_filenames(ingestpath, filetype):
    """ create a dictionary by filetype of files in given path """
    if ingestpath[0] != '/':
        # don't use getcwd as it canonicalizes path
        # which is not what we want for links internal to archive
        cwd = os.getenv('PWD')
        ingestpath = cwd + '/' + ingestpath

    if not os.path.exists(ingestpath):
        miscutils.fwdie(f"Error: could not find ingestpath: {ingestpath}", 1)

    filelist = []
    for (dirpath, _, filenames) in os.walk(ingestpath):
        for fname in filenames:
            filelist.append(dirpath + '/' + fname)

    return {filetype: filelist}

def get_filepath(self, pathtype, dirpat=None, searchopts=None):
    """ Return filepath based upon given pathtype and directory pattern name """

    # get directory pattern name from global settings if not provided:
    if not dirpat:
        (found, dirpat) = self.search(pfwdefs.DIRPAT, searchopts)
        if not found:
            miscutils.fwdie("Error: Could not find dirpat", pfwdefs.PF_EXIT_FAILURE)

    if dirpat in self[pfwdefs.DIRPATSECT]:
        filepathpat = self[pfwdefs.DIRPATSECT][dirpat][pathtype]
    else:
        miscutils.fwdie("Error: Could not find pattern %s in directory patterns" %
                        dirpat, pfwdefs.PF_EXIT_FAILURE)

    results = replfuncs.replace_vars_single(filepathpat, self, searchopts)
    return results

def get_job_file_mvmt_info(self):
    """ Return contents of ops_job_file_mvmt and ops_job_file_mvmt_val tables as a dictionary

    Returns
    -------
    dict
    """
    # [site][home][target][key] = [val] where req key is mvmtclass

    sql = "select site,home_archive,target_archive,mvmtclass from ops_job_file_mvmt"
    curs = self.cursor()
    curs.execute(sql)

    info = collections.OrderedDict()
    for (site, home, target, mvmt) in curs:
        if home is None:
            home = 'no_archive'
        if target is None:
            target = 'no_archive'
        if site not in info:
            info[site] = collections.OrderedDict()
        if home not in info[site]:
            info[site][home] = collections.OrderedDict()
        info[site][home][target] = collections.OrderedDict({'mvmtclass': mvmt})

    sql = "select site,home_archive,target_archive,key,val from ops_job_file_mvmt_val"
    curs = self.cursor()
    curs.execute(sql)
    for (site, home, target, key, val) in curs:
        if home is None:
            home = 'no_archive'
        if target is None:
            target = 'no_archive'
        if (site not in info or
                home not in info[site] or
                target not in info[site][home]):
            miscutils.fwdie(f"Error: found info in ops_job_file_mvmt_val({site}, {home}, "
                            f"{target}, {key}, {val}) which is not in ops_job_file_mvmt", 1)
        info[site][home][target][key] = val

    return info

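# Shape of the returned dictionary (site name, archive names, the mvmtclass value
# and the extra key/val pair are all illustrative; 'mvmtclass' is the only key
# guaranteed by the first query, and missing archives appear as 'no_archive'):
#
#     {'somesite':
#         {'home_archive_name':
#             {'no_archive': {'mvmtclass': 'some.python.MvmtClass',
#                             'somekey': 'someval'}}}}
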
def create_list_of_files(filemgmt, args):
    """ Create list of files to register """
    filelist = None
    starttime = time.time()
    if args['filetype'] is not None:
        if not filemgmt.is_valid_filetype(args['filetype']):
            miscutils.fwdie(f"Error: Invalid filetype ({args['filetype']})", 1)
        filelist = get_list_filenames(args['path'], args['filetype'])
    elif args['list'] is not None:
        filelist = parse_provided_list(args['list'])
    endtime = time.time()

    print(f"DONE ({endtime - starttime:0.2f} secs)", flush=True)
    print(f"\t{sum([len(x) for x in filelist.values()])} files in list", flush=True)

    if miscutils.fwdebug_check(6, "REGISTER_FILES_DEBUG"):
        miscutils.fwdebug_print(f"filelist={filelist}")
    return filelist

def main(argv=None):
    """Program entry point. """
    if argv is None:
        argv = sys.argv

    if len(argv) != 3:
        miscutils.fwdie("Usage: runqueries.pl configfile condorjobid\n", pfwdefs.PF_EXIT_FAILURE)

    configfile = argv[1]
    condorid = argv[2]

    config = pfwconfig.PfwConfig({'wclfile': configfile})

    # log condor jobid
    log_pfw_event(config, config['curr_block'], 'runqueries', 'j', ['cid', condorid])

    if pfwdefs.SW_MODULELIST not in config:
        miscutils.fwdie("Error: No modules to run.", pfwdefs.PF_EXIT_FAILURE)

    ### Get master lists and files calling external codes when needed
    modulelist = miscutils.fwsplit(config[pfwdefs.SW_MODULELIST].lower())
    modules_prev_in_list = {}
    for modname in modulelist:
        if modname not in config[pfwdefs.SW_MODULESECT]:
            miscutils.fwdie("Error: Could not find module description for module %s\n" % (modname),
                            pfwdefs.PF_EXIT_FAILURE)
        runqueries(config, configfile, modname, modules_prev_in_list)
        modules_prev_in_list[modname] = True

    return 0

def parse_provided_list(listname):
    """ create dictionary of files from list in file """
    #cwd = os.getcwd()
    # don't use getcwd as it canonicalizes path
    # which is not what we want for links internal to archive
    cwd = os.getenv('PWD')

    uniqfiles = {}
    filelist = {}
    try:
        with open(listname, "r") as listfh:
            for line in listfh:
                (fullname, filetype) = miscutils.fwsplit(line, ',')
                if fullname[0] != '/':
                    fullname = cwd + '/' + fullname

                if not os.path.exists(fullname):
                    miscutils.fwdie(f"Error: could not find file on disk: {fullname}", 1)

                (_, fname) = os.path.split(fullname)
                if fname in uniqfiles:
                    miscutils.fwdie(f"Error: Found duplicate filenames in list: {fname}", 1)
                uniqfiles[fname] = True

                if filetype not in filelist:
                    filelist[filetype] = []
                filelist[filetype].append(fullname)
    except IOError as err:
        miscutils.fwdie(f"Error: Problems reading file '{listname}': {err}", 1)

    return filelist

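# Expected list-file format for parse_provided_list(): one "fullname,filetype" pair
# per line; relative paths are resolved against $PWD (assumed here to be /work/run1),
# basenames must be unique, and the result groups fullnames by filetype.  Paths and
# filetypes below are made up.
#
#     raw/D00233601_g_c01.fits,raw
#     raw/D00233601_g_c02.fits,raw
#     cal/bias_c01.fits,cal_bias
#
# -> {'raw': ['/work/run1/raw/D00233601_g_c01.fits',
#             '/work/run1/raw/D00233601_g_c02.fits'],
#     'cal_bias': ['/work/run1/cal/bias_c01.fits']}
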
def min_wcl_checks(config):
    """ execute minimal submit wcl checks """
    max_label_length = 30    # todo: figure out how to get length from DB

    msg = "ERROR\nError: Missing %s in submit wcl. Make sure submitting correct file. "
    msg += "Aborting submission."

    # check that reqnum and unitname exist
    (exists, _) = config.search(pfwdefs.REQNUM, {intgdefs.REPLACE_VARS: True})
    if not exists:
        miscutils.fwdie(msg % pfwdefs.REQNUM, pfwdefs.PF_EXIT_FAILURE)

    (exists, _) = config.search(pfwdefs.UNITNAME, {intgdefs.REPLACE_VARS: True})
    if not exists:
        miscutils.fwdie(msg % pfwdefs.UNITNAME, pfwdefs.PF_EXIT_FAILURE)

    # check that any given labels are short enough
    (exists, labelstr) = config.search(pfwdefs.SW_LABEL, {intgdefs.REPLACE_VARS: True})
    if exists:
        labels = miscutils.fwsplit(labelstr, ',')
        for lab in labels:
            if len(lab) > max_label_length:
                miscutils.fwdie("ERROR\nError: label %s is longer (%s) than allowed (%s). "
                                "Aborting submission." %
                                (lab, len(lab), max_label_length), pfwdefs.PF_EXIT_FAILURE)