def parseSwitches():
    """Parse the switches passed by the user and return them as a dict.

    Exits via DIRACExit(1) when positional arguments are present or when an
    invalid value is given for the --status switch.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if args:
        subLogger.error("Found the following positional args '%s', but we only accept switches" % args)
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    switches = dict(Script.getUnprocessedSwitches())

    # Warn about missing switches that would narrow the query
    for key in ('status', 'se', 'limit'):
        if key not in switches:
            # print() form runs on both Python 2 and 3 (original used the
            # Python 2 print statement)
            print("You're not using switch --%s, query may take long!" % key)

    if 'status' in switches:
        if switches['status'] not in ('New', 'Offline', 'Waiting', 'Failed', 'StageSubmitted', 'Staged'):
            subLogger.error("Found \"%s\" as Status value. Incorrect value used!" % switches['status'])
            subLogger.error("Please, check documentation below")
            Script.showHelp()
            DIRACExit(1)

    subLogger.debug("The switches used are:")
    # explicit loop instead of map(): map() is lazy on Python 3 and would
    # never emit the debug lines there
    for item in switches.items():
        subLogger.debug(item)

    return switches
def parseSwitches():
    """Parse the switches passed by the user and return them as a dict.

    The 'element', 'name' and 'reason' switches are mandatory; exits via
    DIRACExit(1) on any invalid input.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if args:
        subLogger.error("Found the following positional args '%s', but we only accept switches" % args)
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    switches = dict(Script.getUnprocessedSwitches())
    # defaults for the optional switches
    switches.setdefault('statusType', None)
    switches.setdefault('releaseToken', False)

    # these switches are mandatory
    for key in ('element', 'name', 'reason'):
        if key not in switches:
            subLogger.error("%s Switch missing" % key)
            subLogger.error("Please, check documentation below")
            Script.showHelp()
            DIRACExit(1)

    if switches['element'] not in ('Site', 'Resource', 'Node'):
        subLogger.error("Found %s as element switch" % switches['element'])
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return switches
def parseSwitches():
    """Parse the switches passed by the user and return them as a dict.

    'element' is optional here and may also be 'all'; exits via DIRACExit(1)
    on an invalid value.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if args:
        subLogger.error("Found the following positional args '%s', but we only accept switches" % args)
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    switches = dict(Script.getUnprocessedSwitches())

    # Default values
    switches.setdefault('element', None)

    if switches['element'] not in ('all', 'Site', 'Resource', 'Node', None):
        subLogger.error("Found %s as element switch" % switches['element'])
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return switches
def checkSettings(self):
    """Read the transformation ID from the first positional argument.

    Returns S_ERROR when no positional argument was supplied, S_OK otherwise.
    """
    positional = Script.getPositionalArgs()
    if not positional:
        return S_ERROR()
    self.setTransID(positional[0])
    return S_OK()
def parseSwitches():
    """Parse the command line and return (args, switches).

    The single positional argument selects the command ('test', 'update',
    'view', 'remove' or 'restore'); error() is invoked on any invalid input
    (presumably it logs and exits -- otherwise 'cmd' below would be
    undefined when no argument is given).
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) == 0:
        error("Argument is missing, you should enter either 'test', 'update', 'view', 'remove', 'restore'")
    else:
        cmd = args[0].lower()

    switches = dict(Script.getUnprocessedSwitches())
    diracConfigFile = CSHelpers.gConfig.diracConfigFilePath

    # Default values
    switches.setdefault('name', None)
    switches.setdefault('element', None)
    switches.setdefault('elementType', None)
    switches.setdefault('setup', "Defaults")
    switches.setdefault('file', diracConfigFile)
    #switches.setdefault( 'statusType', None )
    #switches.setdefault( 'status', None )

    # when it's a add/modify query and status/reason/statusType are not specified
    # then some specific defaults are set up
    if cmd == 'test':
        if switches['elementType'] is None and switches['element'] is None and switches['name'] is None:
            error("to test, you should enter at least one switch: either element, elmentType, or name")
        else:
            if switches['element'] is not None:
                switches['element'] = switches['element'].title()
                if switches['element'] not in ('Resource', 'Site'):
                    error("you should enter either 'Site' or 'Resource' for switch 'element'")
            if switches['elementType'] is not None:
                switches['elementType'] = switches['elementType'].title()
            if switches['file'] is None:
                error("Enter a fullpath dirac config file location when using 'file' option")
    elif cmd == 'remove':
        if 'policy' not in switches or switches['policy'] is None:
            error("to remove, you should enter a policy")
    elif cmd in ('update', 'view', 'restore'):
        pass
    else:
        error("Incorrect argument: you should enter either 'test', 'update', 'view', 'remove', 'restore'")

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return args, switches
def _findInFC():
    """Find files in the FileCatalog matching a metadata query.

    The first positional argument is the search path, the remaining ones form
    the metadata query.  Prints the matching LFNs (or only their parent
    directories when requested) and exits via DIRAC exit.
    """
    from DIRAC import exit as dexit
    clip = _Params()
    clip.registerSwitches()
    Script.parseCommandLine()

    args = Script.getPositionalArgs()
    if len(args) < 2:
        Script.showHelp('ERROR: Not enough arguments')
        gLogger.error("Run %s --help" % SCRIPTNAME)
        dexit(1)

    path = args[0]
    if path == '.':
        path = '/'

    ## Check that the first argument is not a MetaQuery
    if any(op in path for op in OPLIST):
        gLogger.error("ERROR: Path '%s' is not a valid path! The first argument must be a path" % path)
        gLogger.error("Run %s --help" % SCRIPTNAME)
        dexit(1)
    gLogger.verbose("Path:", path)

    metaQuery = args[1:]
    metaDataDict = _createQueryDict(metaQuery)
    gLogger.verbose("Query:", str(metaDataDict))
    if not metaDataDict:
        gLogger.info("No query")
        dexit(1)

    fc = FileCatalogClient()
    res = fc.findFilesByMetadata(metaDataDict, path)
    if not res['OK']:
        gLogger.error(res['Message'])
        dexit(1)
    if not res['Value']:
        gLogger.notice("No files found")

    if clip.printOnlyDirectories:
        # keep only the unique parent directories of the matches
        listToPrint = set("/".join(fullpath.split("/")[:-1]) for fullpath in res['Value'])
    else:
        listToPrint = res['Value']

    for entry in listToPrint:
        # print() works on Python 2 and 3 (original used the py2 print statement)
        print(entry)

    dexit(0)
def parseSwitches():
    """Parse the command line and return (args, switches).

    Positional arguments: query ('select'|'add'|'modify'|'delete'),
    element ('site'|'resource'|'component'|'node') and
    tableType ('status'|'log'|'history').  error() is invoked on bad input.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) < 3:
        error("Missing all mandatory 'query', 'element', 'tableType' arguments")
    elif args[0].lower() not in ('select', 'add', 'modify', 'delete'):
        error("Incorrect 'query' argument")
    elif args[1].lower() not in ('site', 'resource', 'component', 'node'):
        error("Incorrect 'element' argument")
    elif args[2].lower() not in ('status', 'log', 'history'):
        error("Incorrect 'tableType' argument")
    else:
        query = args[0].lower()

    switches = dict(Script.getUnprocessedSwitches())

    # Default values
    switches.setdefault('name', None)
    switches.setdefault('statusType', None)
    switches.setdefault('status', None)
    switches.setdefault('elementType', None)
    switches.setdefault('reason', None)
    switches.setdefault('lastCheckTime', None)
    switches.setdefault('tokenOwner', None)

    if 'status' in switches and switches['status'] is not None:
        switches['status'] = switches['status'].title()
        if switches['status'] not in ('Active', 'Probing', 'Degraded', 'Banned', 'Unknown', 'Error'):
            error("'%s' is an invalid argument for switch 'status'" % switches['status'])

    # when it's a add/modify query and status/reason/statusType are not specified
    # then some specific defaults are set up
    if query in ('add', 'modify'):
        if 'status' not in switches or switches['status'] is None:
            switches['status'] = 'Unknown'
        if 'reason' not in switches or switches['reason'] is None:
            switches['reason'] = 'Unknown reason'
        if 'statusType' not in switches or switches['statusType'] is None:
            switches['statusType'] = 'all'

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return args, switches
def parseSwitches():
    """Parse the command line and return (args, switches).

    Positional arguments: query ('select'|'add'|'modify'|'delete'),
    element ('site'|'resource'|'component'|'node') and
    tableType ('status'|'log'|'history').  error() is invoked on bad input.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) < 3:
        error("Missing all mandatory 'query', 'element', 'tableType' arguments")
    elif args[0].lower() not in ("select", "add", "modify", "delete"):
        error("Incorrect 'query' argument")
    elif args[1].lower() not in ("site", "resource", "component", "node"):
        error("Incorrect 'element' argument")
    elif args[2].lower() not in ("status", "log", "history"):
        error("Incorrect 'tableType' argument")
    else:
        query = args[0].lower()

    switches = dict(Script.getUnprocessedSwitches())

    # Default values
    switches.setdefault("name", None)
    switches.setdefault("statusType", None)
    switches.setdefault("status", None)
    switches.setdefault("elementType", None)
    switches.setdefault("reason", None)
    switches.setdefault("lastCheckTime", None)
    switches.setdefault("tokenOwner", None)

    if "status" in switches and switches["status"] is not None:
        switches["status"] = switches["status"].title()
        if switches["status"] not in ("Active", "Probing", "Degraded", "Banned", "Unknown", "Error"):
            error("'%s' is an invalid argument for switch 'status'" % switches["status"])

    # when it's a add/modify query and status/reason/statusType are not specified
    # then some specific defaults are set up
    if query in ("add", "modify"):
        if "status" not in switches or switches["status"] is None:
            switches["status"] = "Unknown"
        if "reason" not in switches or switches["reason"] is None:
            switches["reason"] = "Unknown reason"
        if "statusType" not in switches or switches["statusType"] is None:
            switches["statusType"] = "all"

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return args, switches
def checkSettings(self):
    """check if all required parameters are set, print error message and return S_ERROR if not"""
    positional = Script.getPositionalArgs()
    if len(positional) >= 4:
        # the first four positional arguments are, in order:
        # production ID, target SE, source SE and data type
        self.setProdID(positional[0])
        self.setTargetSE(positional[1])
        self.setSourceSE(positional[2])
        self.setDatatype(positional[3])
    else:
        self.errorMessages.append("ERROR: Not enough arguments")
    self.checkProxy()
    if self.errorMessages:
        gLogger.error("\n".join(self.errorMessages))
        Script.showHelp()
        return S_ERROR()
    return S_OK()
def parseSwitches():
    """Parse the command line and return a dict of query parameters.

    Expects exactly two positional arguments: the info type (e.g. 'ce',
    'site-se') and the CE/site name it applies to.  The VO defaults to the
    one derived from the current proxy group.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) != 2:
        Script.showHelp()

    params = {'ce': None, 'site': None, 'host': None, 'vo': None, 'info': args[0]}

    ret = getProxyInfo(disableVOMS=True)
    if ret['OK'] and 'group' in ret['Value']:
        params['vo'] = getVOForGroup(ret['Value']['group'])
    else:
        Script.gLogger.error('Could not determine VO')
        Script.showHelp()

    # route the second positional argument depending on the info type
    if params['info'] in ['ce', 'ce-state', 'ce-cluster', 'ce-vo']:
        params['ce'] = args[1]
    elif params['info'] in ['site', 'site-se']:
        params['site'] = args[1]
    else:
        Script.gLogger.error('Wrong argument value')
        Script.showHelp()

    for unprocSw in Script.getUnprocessedSwitches():
        if unprocSw[0] in ("H", "host"):
            params['host'] = unprocSw[1]
        elif unprocSw[0] in ("V", "vo"):
            # elif: a single switch cannot be both host and vo
            params['vo'] = unprocSw[1]

    return params
def parseSwitches():
    """Parse switches and positional arguments given to the script."""
    # Parse the command line and initialize DIRAC
    Script.parseCommandLine(ignoreErrors=False)

    # Get the list of services
    servicesList = Script.getPositionalArgs()
    gLogger.info('This is the servicesList %s:' % servicesList)

    # Gets the rest of the switches
    switches = dict(Script.getUnprocessedSwitches())
    gLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        gLogger.debug(item)

    switches['servicesList'] = servicesList
    return switches
def parseSwitches():
    """Parse the switches passed by the user and return them as a dict.

    The 'element' switch is mandatory and restricted to Site/Resource/Node;
    exits via DIRACExit(1) on any invalid input.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if args:
        subLogger.error("Found the following positional args '%s', but we only accept switches" % args)
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    switches = dict(Script.getUnprocessedSwitches())

    # Default values
    switches.setdefault("elementType", None)
    switches.setdefault("name", None)
    switches.setdefault("tokenOwner", None)
    switches.setdefault("statusType", None)
    switches.setdefault("status", None)

    if "element" not in switches:
        subLogger.error("element Switch missing")
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    if switches["element"] not in ("Site", "Resource", "Node"):
        subLogger.error("Found %s as element switch" % switches["element"])
        subLogger.error("Please, check documentation below")
        Script.showHelp()
        DIRACExit(1)

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return switches
def parseSwitches():
    """Parse the command line and return (args, switches) for downtime queries.

    The single positional argument selects the query ('select', 'add' or
    'delete'); error() is invoked on any invalid input.
    """
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) == 0:
        error("Missing mandatory 'query' argument")
    elif args[0].lower() not in ('select', 'add', 'delete'):
        error("Missing mandatory argument")
    else:
        query = args[0].lower()

    switches = dict(Script.getUnprocessedSwitches())

    # Default values
    switches.setdefault('downtimeID', None)
    switches.setdefault('element', None)
    switches.setdefault('name', None)
    switches.setdefault('startDate', None)
    switches.setdefault('endDate', None)
    switches.setdefault('severity', None)
    switches.setdefault('description', None)
    switches.setdefault('link', None)

    if query in ('add', 'delete') and switches['downtimeID'] is None:
        error("'downtimeID' switch is mandatory for '%s' but found missing" % query)
    if query in ('add', 'delete') and 'ongoing' in switches:
        error("'ongoing' switch can be used only with 'select'")

    subLogger.debug("The switches used are:")
    # map() is lazy on Python 3; iterate explicitly so the entries are logged
    for item in switches.items():
        subLogger.debug(item)

    return (args, switches)
def main():
    """Run the make_CTA_DST ROOT macro and the subsequent DST check steps.

    Reads run number, input file, telescope list, optional event count and
    HAP version from the command-line switches, ensures the required software
    packages are available, executes make_CTA_DST.C and then three DST check
    stages.  Exits the process via DIRAC.exit on any failure.
    """
    from DIRAC.Core.Base import Script

    ### make_CTA_DST options ###############################################
    Script.registerSwitch( "R:", "run_number=", "Run Number", setRunNumber )
    Script.registerSwitch( "I:", "infile=", "Input file", setInfile )
    Script.registerSwitch( "T:", "tellist=", "Tellist", setTellist )
    Script.registerSwitch( "N:", "nevent=", "Nevent", setNevent )
    ### other options ###############################################
    Script.registerSwitch( "V:", "version=", "HAP version", setVersion )

    Script.parseCommandLine( ignoreErrors = True )
    args = Script.getPositionalArgs()

    if len( args ) < 1:
        Script.showHelp()

    # infile/tellist/version are presumably module-level globals filled by the
    # switch callbacks registered above -- TODO confirm
    if infile == None or tellist == None or version == None:
        Script.showHelp()
        # NOTE(review): jobReport is only assigned further down in this
        # function; if this branch is taken, this line raises NameError unless
        # a module-level jobReport exists -- confirm
        jobReport.setApplicationStatus('Options badly specified')
        DIRAC.exit( -1 )

    from CTADIRAC.Core.Workflow.Modules.HapRootMacro import HapRootMacro
    from CTADIRAC.Core.Utilities.SoftwareInstallation import checkSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import getSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import localArea
    from CTADIRAC.Core.Utilities.SoftwareInstallation import sharedArea
    from DIRAC.Core.Utilities.Subprocess import systemCall
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport

    # status updates are reported against the current grid job
    jobID = os.environ['JOBID']
    jobID = int( jobID )
    jobReport = JobReport( jobID )

    # software packages required by the macro
    HapPack = 'HAP/' + version + '/HAP'
    packs = ['HESS/v0.2/lib','HESS/v0.3/root',HapPack]

    # look for each package in the shared area, then the local area,
    # installing into the local area as a last resort
    for package in packs:
        DIRAC.gLogger.notice( 'Checking:', package )
        if sharedArea:
            if checkSoftwarePackage( package, sharedArea() )['OK']:
                DIRAC.gLogger.notice( 'Package found in Shared Area:', package )
                continue
        if localArea:
            if checkSoftwarePackage( package, localArea() )['OK']:
                DIRAC.gLogger.notice( 'Package found in Local Area:', package )
                continue
            if installSoftwarePackage( package, localArea() )['OK']:
                continue
        DIRAC.gLogger.error( 'Check Failed for software package:', package )
        DIRAC.gLogger.error( 'Software package not available')
        DIRAC.exit( -1 )

    # build the make_CTA_DST.C argument list: run number, quoted input file,
    # quoted telescope-configuration path and, when given, the event count
    hr = HapRootMacro()
    hr.setSoftwarePackage(HapPack)
    telconf = os.path.join( localArea(),'HAP/%s/config/%s' % (version,tellist))
    infilestr = '"' + infile + '"'
    telconfstr = '"' + telconf + '"'
    args = [str(int(RunNum)), infilestr, telconfstr]
    try:
        args.extend([nevent])
    except NameError:
        # nevent is only defined when the -N switch was given
        DIRAC.gLogger.info( 'nevent arg not used' )

    DIRAC.gLogger.notice( 'make_CTA_DST macro Arguments:', args )
    hr.rootMacro = '/hapscripts/dst/make_CTA_DST.C+'
    hr.rootArguments = args
    DIRAC.gLogger.notice( 'Executing Hap make_CTA_DST macro' )
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Failed to execute make_CTA_DST macro')
        jobReport.setApplicationStatus('Failure during make_CTA_DST')
        DIRAC.exit( -1 )

    ############ check existance of output file ####
    filedst = 'dst_CTA_%08d' % int(RunNum) + '.root'
    if not os.path.isfile(filedst):
        DIRAC.gLogger.error('dst file not found:', filedst )
        jobReport.setApplicationStatus('make_CTA_DST.C: DST file not created')
        DIRAC.exit( -1 )

    ###################Check std out #############################
    DIRAC.gLogger.notice('Executing DST Check step0')
    ret = getSoftwareEnviron(HapPack)
    if not ret['OK']:
        error = ret['Message']
        DIRAC.gLogger.error( error, HapPack)
        DIRAC.exit( -1 )
    hapEnviron = ret['Value']
    hessroot = hapEnviron['HESSROOT']

    # check_dst0.csh exit status: 1 = fatal DST problem, 2 = no triggered events
    check_script = hessroot + '/hapscripts/dst/check_dst0.csh'
    cmdTuple = [check_script]
    ret = systemCall( 0, cmdTuple, sendOutput)
    if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute DST Check step0')
        jobReport.setApplicationStatus('Check_dst0: Failed')
        DIRAC.exit( -1 )
    status, stdout, stderr = ret['Value']
    if status==1:
        jobReport.setApplicationStatus('Check_dst0: Big problem during the DST production')
        DIRAC.gLogger.error( 'DST Check step0 reports: Big problem during the DST production' )
        DIRAC.exit( -1 )
    if status==2:
        jobReport.setApplicationStatus('Check_dst0: No triggered events')
        DIRAC.gLogger.notice( 'DST Check step0 reports: No triggered events' )
        DIRAC.exit( )

    ############# run the CheckDST macro #################
    DIRAC.gLogger.notice('Executing DST check step1')
    hr.rootMacro = '/hapscripts/dst/CheckDST.C+'
    fileoutstr = '"' + filedst + '"'
    args = [fileoutstr]
    DIRAC.gLogger.notice( 'CheckDST macro Arguments:', args )
    hr.rootArguments = args
    DIRAC.gLogger.notice( 'Executing Hap CheckDST macro')
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Failure during DST Check step1' )
        jobReport.setApplicationStatus('Check_dst1: Failed')
        DIRAC.exit( -1 )

    ######################check stdout of CheckDST macro ###########################
    DIRAC.gLogger.notice('Executing DST Check step2')
    check_script = hessroot + '/hapscripts/dst/check_dst2.csh'
    cmdTuple = [check_script]
    ret = systemCall( 0, cmdTuple, sendOutput )
    if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute DST Check step2')
        jobReport.setApplicationStatus('Check_dst2: Failed')
        DIRAC.exit( -1 )
    status, stdout, stderr = ret['Value']
    if status==1:
        jobReport.setApplicationStatus('DST Check step2: Big problem during the DST production')
        DIRAC.gLogger.error( 'DST Check step2 reports: Big problem during the DST production' )
        DIRAC.exit( -1 )
    if status==2:
        jobReport.setApplicationStatus('DST Check step2: No triggered events')
        DIRAC.gLogger.notice( 'DST Check step2 reports: No triggered events' )
        DIRAC.exit( )

    DIRAC.exit()
# Get the File List By Dataset Name
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient

# Allow overriding the catalog type via the -f/--file-catalog switch.
# NOTE: _client is only created when the switch is present.
for switch in Script.getUnprocessedSwitches():
    if switch[0].lower() in ("f", "file-catalog"):
        _fcType = switch[1]
        _client = FileCatalogClient("DataManagement/" + _fcType)


def getLFNsByDataset(dataset):
    """Return the list of LFNs belonging to *dataset* (empty list on any error)."""
    result = _client.getMetadataSet(dataset, True)
    if not result["OK"]:
        return []
    if not result["Value"]:
        import sys
        sys.stderr.write("\nWARNNING: datatset(%s) does not exist.\n" % dataset)
        return []
    metadataDict = result["Value"]
    lfns = _client.findFilesByMetadata(metadataDict, '/')
    if not lfns["OK"]:
        return []
    return lfns["Value"]


for dataset in Script.getPositionalArgs():
    for lfn in getLFNsByDataset(dataset):
        # print() works on Python 2 and 3 (original used the py2 print statement)
        print(lfn)
# Allow overriding the catalog type via the -f/--file-catalog switch.
# NOTE: _client is only created when the switch is present.
for switch in Script.getUnprocessedSwitches():
    if switch[0].lower() in ("f", "file-catalog"):
        _fcType = switch[1]
        _client = FileCatalogClient("DataManagement/" + _fcType)


def getLFNsByDataset(dataset):
    """Return the list of LFNs belonging to *dataset* (empty list on any error)."""
    result = _client.getMetadataSet(dataset, True)
    if not result["OK"]:
        return []
    if not result["Value"]:
        import sys
        sys.stderr.write("\nWARNNING: datatset(%s) does not exist.\n" % dataset)
        return []
    metadataDict = result["Value"]
    lfns = _client.findFilesByMetadata(metadataDict, "/")
    if not lfns["OK"]:
        return []
    return lfns["Value"]


for dataset in Script.getPositionalArgs():
    for lfn in getLFNsByDataset(dataset):
        # print() works on Python 2 and 3 (original used the py2 print statement)
        print(lfn)
reqClient = ReqClient()
# Build the list of request IDs to operate on, from one of three sources:
# a transformation + task list, the positional arguments (possibly files
# containing IDs), or the requests attached to a list of jobs.
if transID:
    if not taskIDs:
        gLogger.fatal("If Transformation is set, a list of Tasks should also be set")
        Script.showHelp()
        DIRAC.exit(2)
    # In principle, the task name is unique, so the request name should be unique as well
    # If ever this would not work anymore, we would need to use the transformationClient
    # to fetch the ExternalID
    requests = ['%08d_%08d' % (transID, task) for task in taskIDs]
    allR = True
elif not jobs:
    requests = []
    # Get full list of arguments, with and without comma
    # (distinct comprehension variables: the original reused 'arg' both as the
    # comprehension loop variable and the outer loop variable)
    for arg in [token.strip() for rawArg in Script.getPositionalArgs() for token in rawArg.split(',')]:
        if os.path.exists(arg):
            # close the file deterministically (original leaked the handle)
            with open(arg, 'r') as reqFile:
                lines = reqFile.readlines()
            requests += [reqID.strip() for line in lines for reqID in line.split(',')]
            gLogger.notice("Found %d requests in file" % len(requests))
        else:
            requests.append(arg)
    allR = True
else:
    res = reqClient.getRequestIDsForJobs(jobs)
    if not res['OK']:
        gLogger.fatal("Error getting request for jobs", res['Message'])
        DIRAC.exit(2)
    if res['Value']['Failed']:
        gLogger.error("No request found for jobs %s" % ','.join(sorted(str(job) for job in res['Value']['Failed'])))
    requests = sorted(res['Value']['Successful'].values())
# add the sequence of executables job.setupWorkflow(debug=True) # submit to the Transformation System res = submit_ts(job) # debug Script.gLogger.info(job.workflow) return res ######################################################### if __name__ == '__main__': """ Do things """ ARGS = Script.getPositionalArgs() if len(ARGS) != 6: Script.showHelp() try: RES = run_prod3(ARGS) if not RES['OK']: DIRAC.gLogger.error(RES['Message']) DIRAC.exit(-1) else: DIRAC.gLogger.notice('Done') except RuntimeError: DIRAC.gLogger.exception() DIRAC.exit(-1)
def main():
    """Mirror a local directory to a grid storage element or vice versa.

    Takes 2-3 positional arguments: source, destination and (for uploads) the
    target storage element.  With --sync/-D the destination is made identical
    to the source (deletions included); -j/--parallel sets the number of
    worker processes.
    """
    Script.registerSwitch("D", "sync", "Make target directory identical to source")
    Script.registerSwitch("j:", "parallel=", "Multithreaded download and upload")
    Script.parseCommandLine(ignoreErrors=False)
    args = Script.getPositionalArgs()
    if len(args) < 1 or len(args) > 3:
        Script.showHelp()

    sync = False
    parallel = 1
    for switch in Script.getUnprocessedSwitches():
        # NOTE(review): the short form registered above is "D", but "s" is
        # tested here; only the long form "sync" can match -- confirm intent
        if switch[0].lower() == "s" or switch[0].lower() == "sync":
            sync = True
        if switch[0].lower() == "j" or switch[0].lower() == "parallel":
            parallel = int(switch[1])

    from DIRAC import S_OK, S_ERROR
    from DIRAC import gConfig, gLogger
    from DIRAC.Core.Utilities.List import breakListIntoChunks
    from DIRAC.Core.Utilities.ReturnValues import returnSingleResult
    from DIRAC.Core.Utilities.File import mkDir
    from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
    from DIRAC.DataManagementSystem.Client.DataManager import DataManager
    from DIRAC.Resources.Storage.StorageElement import StorageElement

    def getSetOfLocalDirectoriesAndFiles(path):
        """Return a set of all directories and subdirectories and a set of
        files contained therein for a given local path
        """
        fullPath = os.path.abspath(path)
        if not os.path.isdir(fullPath):
            return S_ERROR("The path: " + fullPath + " does not exist!")
        directories = set()
        files = set()
        for dirname, dirnames, filenames in os.walk(path):
            # add path to all subdirectories first.
            for subdirname in dirnames:
                fullSubdirname = os.path.join(dirname, subdirname)
                fullSubdirname = os.path.abspath(fullSubdirname)
                fullSubdirname = fullSubdirname.replace(fullPath, '').lstrip('/')
                directories.add(fullSubdirname)
            # add path to all filenames.
            for filename in filenames:
                fullFilename = os.path.join(dirname, filename)
                fullFilename = os.path.abspath(fullFilename)
                fullFilename = fullFilename.replace(fullPath, '').lstrip('/')
                fileSize = os.path.getsize(fullPath + "/" + fullFilename)
                # zero-length files are skipped
                if fileSize > 0:
                    files.add((fullFilename, int(fileSize)))
        tree = {}
        tree["Directories"] = directories
        tree["Files"] = files
        return S_OK(tree)

    def getSetOfRemoteSubDirectoriesAndFiles(path, fc, directories, files):
        """ Recursively traverses all the subdirectories of a directory and
        returns a set of directories and files """
        result = fc.listDirectory(path)
        if result['OK']:
            if result['Value']['Successful']:
                for entry in result['Value']['Successful'][path]['Files']:
                    size = result['Value']['Successful'][path]['Files'][entry]['MetaData']['Size']
                    files.add((entry, size))
                for entry in result['Value']['Successful'][path]['SubDirs']:
                    directories.add(entry)
                    res = getSetOfRemoteSubDirectoriesAndFiles(entry, fc, directories, files)
                    if not res['OK']:
                        return S_ERROR('Error: ' + res['Message'])
                return S_OK()
            else:
                return S_ERROR("Error: %s" % result['Value'])
        else:
            return S_ERROR("Error:" + result['Message'])

    def getSetOfRemoteDirectoriesAndFiles(fc, path):
        """ Return a set of all directories and subdirectories and the therein
        contained files for a given LFN """
        directories = set()
        files = set()
        res = getSetOfRemoteSubDirectoriesAndFiles(path, fc, directories, files)
        if not res['OK']:
            return S_ERROR('Could not list remote directory: ' + res['Message'])
        # strip the base path so entries are relative, like the local listing
        return_directories = set()
        return_files = set()
        for myfile in files:
            return_files.add((myfile[0].replace(path, '').lstrip('/'), myfile[1]))
        for mydirectory in directories:
            return_directories.add(mydirectory.replace(path, '').lstrip('/'))
        tree = {}
        tree["Directories"] = return_directories
        tree["Files"] = return_files
        return S_OK(tree)

    def isInFileCatalog(fc, path):
        """ Check if the file is in the File Catalog """
        result = fc.listDirectory(path)
        if result['OK']:
            if result['Value']['Successful']:
                return S_OK()
            else:
                return S_ERROR()
        else:
            return S_ERROR()

    def getContentToSync(upload, fc, source_dir, dest_dir):
        """ Return list of files and directories to be create and deleted """
        # the "to" side is the destination, the "from" side the source;
        # which one is local depends on the transfer direction
        if upload:
            res = getSetOfRemoteDirectoriesAndFiles(fc, dest_dir)
            if not res['OK']:
                return S_ERROR(res['Message'])
            to_dirs = res['Value']['Directories']
            to_files = res['Value']['Files']
            res = getSetOfLocalDirectoriesAndFiles(source_dir)
            if not res['OK']:
                return S_ERROR(res['Message'])
            from_dirs = res['Value']['Directories']
            from_files = res['Value']['Files']
        else:
            res = getSetOfLocalDirectoriesAndFiles(dest_dir)
            if not res['OK']:
                return S_ERROR(res['Message'])
            to_dirs = res['Value']['Directories']
            to_files = res['Value']['Files']
            res = getSetOfRemoteDirectoriesAndFiles(fc, source_dir)
            if not res['OK']:
                return S_ERROR(res['Message'])
            from_dirs = res['Value']['Directories']
            from_files = res['Value']['Files']
        # Create list of directories to delete
        dirs_delete = list(to_dirs - from_dirs)
        # Sort the list by depth of directory tree
        dirs_delete.sort(key=lambda s: -s.count('/'))
        # Create list of directories to create
        dirs_create = list(from_dirs - to_dirs)
        # Sort the list by depth of directory tree
        dirs_create.sort(key=lambda s: s.count('/'))
        # Flatten the list of pairs (filename, size) to list of filename
        files_delete = [pair[0] for pair in list(to_files - from_files)]
        files_create = [pair[0] for pair in list(from_files - to_files)]
        create = {}
        create["Directories"] = dirs_create
        create["Files"] = files_create
        delete = {}
        delete["Directories"] = dirs_delete
        delete["Files"] = files_delete
        tree = {}
        tree["Create"] = create
        tree["Delete"] = delete
        return S_OK(tree)

    def removeRemoteFiles(dm, lfns):
        """ Remove file from the catalog """
        for lfnList in breakListIntoChunks(lfns, 100):
            res = dm.removeFile(lfnList)
            if not res['OK']:
                return S_ERROR("Failed to remove files:" + lfnList + res['Message'])
            else:
                # NOTE(review): returning inside the loop means only the first
                # chunk of 100 is ever removed -- confirm against upstream
                return S_OK()

    def uploadLocalFile(dm, lfn, localfile, storage):
        """ Upload a local file to a storage element """
        res = dm.putAndRegister(lfn, localfile, storage, None)
        if not res['OK']:
            return S_ERROR('Error: failed to upload %s to %s' % (lfn, storage))
        else:
            return S_OK('Successfully uploaded file to %s' % storage)

    def downloadRemoteFile(dm, lfn, destination):
        """ Download a file from the system """
        res = dm.getFile(lfn, destination)
        if not res['OK']:
            return S_ERROR('Error: failed to download %s ' % lfn)
        else:
            return S_OK('Successfully uploaded file %s' % lfn)

    def removeStorageDirectoryFromSE(directory, storageElement):
        """ Delete directory on selected storage element """
        se = StorageElement(storageElement, False)
        res = returnSingleResult(se.exists(directory))
        if not res['OK']:
            return S_ERROR("Failed to obtain existence of directory" + res['Message'])
        exists = res['Value']
        if not exists:
            return S_OK("The directory %s does not exist at %s " % (directory, storageElement))
        res = returnSingleResult(se.removeDirectory(directory, recursive=True))
        if not res['OK']:
            return S_ERROR("Failed to remove storage directory" + res['Message'])
        return S_OK()

    def removeRemoteDirectory(fc, lfn):
        """ Remove file from the catalog """
        storageElements = gConfig.getValue('Resources/StorageElementGroups/SE_Cleaning_List', [])
        for storageElement in sorted(storageElements):
            res = removeStorageDirectoryFromSE(lfn, storageElement)
            if not res['OK']:
                return S_ERROR("Failed to clean storage directory at all SE:" + res['Message'])
        res = returnSingleResult(fc.removeDirectory(lfn, recursive=True))
        if not res['OK']:
            return S_ERROR("Failed to clean storage directory at all SE:" + res['Message'])
        return S_OK("Successfully removed directory")

    def createRemoteDirectory(fc, newdir):
        """ Create directory in file catalog """
        result = fc.createDirectory(newdir)
        if result['OK']:
            if result['Value']['Successful'] and newdir in result['Value']['Successful']:
                return S_OK("Successfully created directory:" + newdir)
            elif result['Value']['Failed'] and newdir in result['Value']['Failed']:
                return S_ERROR('Failed to create directory: ' + result['Value']['Failed'][newdir])
        else:
            return S_ERROR('Failed to create directory:' + result['Message'])

    def createLocalDirectory(directory):
        """ Create local directory """
        mkDir(directory)
        if not os.path.exists(directory):
            return S_ERROR('Directory creation failed')
        return S_OK('Created directory successfully')

    def removeLocalFile(path):
        """ Remove local file """
        try:
            os.remove(path)
        except OSError as e:
            return S_ERROR('Directory creation failed:' + e.strerror)
        if os.path.isfile(path):
            return S_ERROR('File deleting failed')
        return S_OK('Removed file successfully')

    def removeLocaDirectory(path):
        """ Remove local directory """
        try:
            os.rmdir(path)
        except OSError as e:
            return S_ERROR('Deleting directory failed: ' + e.strerror)
        if os.path.isdir(path):
            return S_ERROR('Directory deleting failed')
        return S_OK('Removed directory successfully')

    def doUpload(fc, dm, result, source_dir, dest_dir, storage, delete, nthreads):
        """ Wrapper for uploading files """
        if delete:
            lfns = [dest_dir + "/" + filename for filename in result['Value']['Delete']['Files']]
            if len(lfns) > 0:
                res = removeRemoteFiles(dm, lfns)
                if not res['OK']:
                    # NOTE(review): 'lfns' is a list; str + list raises
                    # TypeError on this error path -- confirm
                    gLogger.fatal('Deleting of files: ' + lfns + " -X- [FAILED]" + res['Message'])
                    DIRAC.exit(1)
                else:
                    gLogger.notice("Deleting " + ', '.join(lfns) + " -> [DONE]")
            for directoryname in result['Value']['Delete']['Directories']:
                res = removeRemoteDirectory(fc, dest_dir + "/" + directoryname)
                if not res['OK']:
                    gLogger.fatal('Deleting of directory: ' + directoryname + " -X- [FAILED] " + res['Message'])
                    DIRAC.exit(1)
                else:
                    gLogger.notice("Deleting " + directoryname + " -> [DONE]")
        for directoryname in result['Value']['Create']['Directories']:
            res = createRemoteDirectory(fc, dest_dir + "/" + directoryname)
            if not res['OK']:
                gLogger.fatal('Creation of directory: ' + directoryname + " -X- [FAILED] " + res['Message'])
                DIRAC.exit(1)
            else:
                gLogger.notice("Creating " + directoryname + " -> [DONE]")
        listOfFiles = result['Value']['Create']['Files']
        # Chech that we do not have to many threads
        if nthreads > len(listOfFiles):
            nthreads = len(listOfFiles)
        if nthreads == 0:
            return S_OK('Upload finished successfully')
        listOfListOfFiles = chunkList(listOfFiles, nthreads)
        res = runInParallel(arguments=[dm, source_dir, dest_dir, storage],
                            listOfLists=listOfListOfFiles,
                            function=uploadListOfFiles)
        if not res['OK']:
            return S_ERROR("Upload of files failed")
        return S_OK('Upload finished successfully')

    def uploadListOfFiles(dm, source_dir, dest_dir, storage, listOfFiles, tID):
        """ Wrapper for multithreaded uploading of a list of files """
        log = gLogger.getSubLogger("[Thread %s] " % tID)
        threadLine = "[Thread %s]" % tID
        for filename in listOfFiles:
            res = uploadLocalFile(dm, dest_dir + "/" + filename, source_dir + "/" + filename, storage)
            if not res['OK']:
                log.fatal(threadLine + ' Uploading ' + filename + ' -X- [FAILED] ' + res['Message'])
                DIRAC.exit(1)
            else:
                log.notice(threadLine + " Uploading " + filename + " -> [DONE]")

    def doDownload(dm, result, source_dir, dest_dir, delete, nthreads):
        """ Wrapper for downloading files """
        if delete:
            for filename in result['Value']['Delete']['Files']:
                res = removeLocalFile(dest_dir + "/" + filename)
                if not res['OK']:
                    gLogger.fatal('Deleting of file: ' + filename + ' -X- [FAILED] ' + res['Message'])
                    DIRAC.exit(1)
                else:
                    gLogger.notice("Deleting " + filename + " -> [DONE]")
            for directoryname in result['Value']['Delete']['Directories']:
                res = removeLocaDirectory(dest_dir + "/" + directoryname)
                if not res['OK']:
                    gLogger.fatal('Deleting of directory: ' + directoryname + ' -X- [FAILED] ' + res['Message'])
                    DIRAC.exit(1)
                else:
                    gLogger.notice("Deleting " + directoryname + " -> [DONE]")
        for directoryname in result['Value']['Create']['Directories']:
            res = createLocalDirectory(dest_dir + "/" + directoryname)
            if not res['OK']:
                gLogger.fatal('Creation of directory: ' + directoryname + ' -X- [FAILED] ' + res['Message'])
                DIRAC.exit(1)
            else:
                gLogger.notice("Creating " + directoryname + " -> [DONE]")
        listOfFiles = result['Value']['Create']['Files']
        # Chech that we do not have to many threads
        if nthreads > len(listOfFiles):
            nthreads = len(listOfFiles)
        if nthreads == 0:
            return S_OK('Upload finished successfully')
        listOfListOfFiles = chunkList(listOfFiles, nthreads)
        res = runInParallel(
            arguments=[dm, source_dir, dest_dir],
            listOfLists=listOfListOfFiles,
            function=downloadListOfFiles,
        )
        if not res['OK']:
            return S_ERROR("Download of files failed")
        return S_OK('Upload finished successfully')

    def chunkList(alist, nchunks):
        """ Split a list into a list of equaliy sized lists """
        avg = len(alist) / float(nchunks)
        out = []
        last = 0.0
        while last < len(alist):
            out.append(alist[int(last):int(last + avg)])
            last += avg
        return out

    def downloadListOfFiles(dm, source_dir, dest_dir, listOfFiles, tID):
        """ Wrapper for multithreaded downloading of a list of files """
        log = gLogger.getSubLogger("[Thread %s] " % tID)
        threadLine = "[Thread %s]" % tID
        for filename in listOfFiles:
            res = downloadRemoteFile(dm, source_dir + "/" + filename,
                                     dest_dir + ("/" + filename).rsplit("/", 1)[0])
            if not res['OK']:
                log.fatal(threadLine + ' Downloading ' + filename + ' -X- [FAILED] ' + res['Message'])
                DIRAC.exit(1)
            else:
                log.notice(threadLine + " Downloading " + filename + " -> [DONE]")

    def runInParallel(arguments, listOfLists, function):
        """ Helper for execution of uploads and downloads in parallel """
        from multiprocessing import Process
        processes = []
        for tID, alist in enumerate(listOfLists):
            argums = arguments + [alist] + [tID]
            pro = Process(target=function, args=argums)
            pro.start()
            processes.append(pro)
        for process in processes:
            process.join()
        # a worker exits with code 1 on failure (see the list workers above)
        for process in processes:
            if process.exitcode == 1:
                return S_ERROR()
        return S_OK()

    def syncDestinations(upload, source_dir, dest_dir, storage, delete, nthreads):
        """ Top level wrapper to execute functions """
        fc = FileCatalog()
        dm = DataManager()
        result = getContentToSync(upload, fc, source_dir, dest_dir)
        if not result['OK']:
            return S_ERROR(result['Message'])
        if upload:
            res = doUpload(fc, dm, result, source_dir, dest_dir, storage, delete, nthreads)
            if not res['OK']:
                return S_ERROR('Upload failed: ' + res['Message'])
        else:
            res = doDownload(dm, result, source_dir, dest_dir, delete, nthreads)
            if not res['OK']:
                return S_ERROR('Download failed: ' + res['Message'])
        return S_OK('Mirroring successfully finished')

    def run(parameters, delete, nthreads):
        """ The main user interface """
        source_dir = parameters[0]
        dest_dir = parameters[1]
        upload = False
        storage = None
        # three parameters -> upload (third one is the storage element),
        # two parameters -> download
        if len(parameters) == 3:
            storage = parameters[2]
            source_dir = os.path.abspath(source_dir)
            dest_dir = dest_dir.rstrip('/')
            upload = True
            if not os.path.isdir(source_dir):
                gLogger.fatal("Source directory does not exist")
                DIRAC.exit(1)
        if len(parameters) == 2:
            dest_dir = os.path.abspath(dest_dir)
            source_dir = source_dir.rstrip('/')
            if not os.path.isdir(dest_dir):
                gLogger.fatal("Destination directory does not exist")
                DIRAC.exit(1)
        res = syncDestinations(upload, source_dir, dest_dir, storage, delete, nthreads)
        if not res['OK']:
            return S_ERROR(res['Message'])
        return S_OK("Successfully mirrored " + source_dir + " into " + dest_dir)

    returnValue = run(args, sync, parallel)
    if not returnValue['OK']:
        gLogger.fatal(returnValue['Message'])
        DIRAC.exit(1)
    else:
        gLogger.notice(returnValue['Value'])
        DIRAC.exit(0)
try: self.pingsToDo = max(1, int(value)) except ValueError: return S_ERROR("Number of pings to do has to be a number") return S_OK() # Instantiate the params class cliParams = Params() # Register accepted switches and their callbacks Script.registerSwitch("r", "showRaw", "show raw result from the query", cliParams.setRawResult) Script.registerSwitch("p:", "numPings=", "Number of pings to do (by default 1)", cliParams.setNumOfPingsToDo) # Define a help message Script.setUsageMessage('\n'.join([__doc__, 'Usage:', ' %s [option|cfgfile] <system name to ping>+' % Script.scriptName, ' Specifying a system is mandatory'])) # Parse the command line and initialize DIRAC Script.parseCommandLine(ignoreErrors=False) # Get the list of services servicesList = Script.getPositionalArgs() # Check and process the command line switches and options if not servicesList: Script.showHelp() DIRACExit(1)
def main(): global overwrite global specialOptions global module global specialOptions from DIRAC.FrameworkSystem.Client.ComponentInstaller import gComponentInstaller gComponentInstaller.exitOnError = True Script.registerSwitch("w", "overwrite", "Overwrite the configuration in the global CS", setOverwrite) Script.registerSwitch("m:", "module=", "Python module name for the component code", setModule) Script.registerSwitch("p:", "parameter=", "Special component option ", setSpecialOption) Script.parseCommandLine() args = Script.getPositionalArgs() if len(args) == 1: args = args[0].split('/') if len(args) != 2: Script.showHelp() DIRACexit(1) system = args[0] component = args[1] compOrMod = module if module else component result = gComponentInstaller.addDefaultOptionsToCS( gConfig, 'service', system, component, getCSExtensions(), specialOptions=specialOptions, overwrite=overwrite) if not result['OK']: gLogger.error(result['Message']) DIRACexit(1) result = gComponentInstaller.addTornadoOptionsToCS(gConfig) if not result['OK']: gLogger.error(result['Message']) DIRACexit(1) result = gComponentInstaller.installTornado() if not result['OK']: gLogger.error(result['Message']) DIRACexit(1) gLogger.notice( 'Successfully installed component %s in %s system, now setting it up' % (component, system)) result = gComponentInstaller.setupTornadoService(system, component, getCSExtensions(), module) if not result['OK']: gLogger.error(result['Message']) DIRACexit(1) result = MonitoringUtilities.monitorInstallation('service', system, component, module) if not result['OK']: gLogger.error(result['Message']) DIRACexit(1) gLogger.notice('Successfully completed the installation of %s/%s' % (system, component)) DIRACexit()
def main():
  """CTA Corsika production wrapper: parse options, install the
  corsika_simhessarray package (from shared area or by compiling it),
  run CorsikaApp, rename the output and tar the run by-products.

  NOTE(review): latent issues are flagged inline but left untouched.
  """
  from DIRAC.Core.Base import Script
  Script.registerSwitch( "p:", "run_number=", "Run Number", setRunNumber )
  Script.registerSwitch( "R:", "run=", "Run", setRun )
  Script.registerSwitch( "P:", "config_path=", "Config Path", setConfigPath )
  Script.registerSwitch( "T:", "template=", "Template", setTemplate )
  Script.registerSwitch( "E:", "executable=", "Executable", setExecutable )
  Script.registerSwitch( "V:", "version=", "Version", setVersion )
  Script.registerSwitch( "M:", "mode=", "Mode", setMode )
  Script.parseCommandLine( ignoreErrors = True )
  args = Script.getPositionalArgs()
  if len( args ) < 1:
    Script.showHelp()
  if version == None or executable == None or run_number == None or run == None or template == None:
    Script.showHelp()
    # NOTE(review): 'jobReport' is only assigned further down — reaching this
    # line raises NameError instead of reporting the status; confirm/reorder.
    jobReport.setApplicationStatus('Options badly specified')
    DIRAC.exit( -1 )
  from CTADIRAC.Core.Workflow.Modules.CorsikaApp import CorsikaApp
  from CTADIRAC.Core.Utilities.SoftwareInstallation import checkSoftwarePackage
  from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwarePackage
  from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwareEnviron
  from CTADIRAC.Core.Utilities.SoftwareInstallation import localArea
  from CTADIRAC.Core.Utilities.SoftwareInstallation import sharedArea
  from CTADIRAC.Core.Utilities.SoftwareInstallation import workingArea
  from DIRAC.Core.Utilities.Subprocess import systemCall
  from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
  # JOBID is exported by the DIRAC pilot environment
  jobID = os.environ['JOBID']
  jobID = int( jobID )
  jobReport = JobReport( jobID )
  CorsikaSimtelPack = 'corsika_simhessarray/' + version + '/corsika_simhessarray'
  packs = [CorsikaSimtelPack]
  for package in packs:
    DIRAC.gLogger.notice( 'Checking:', package )
    # NOTE(review): 'sharedArea'/'workingArea' are imported function objects, so
    # these truthiness tests are always True — presumably sharedArea() was meant.
    if sharedArea:
      if checkSoftwarePackage( package, sharedArea() )['OK']:
        DIRAC.gLogger.notice( 'Package found in Shared Area:', package )
        installSoftwareEnviron( package, workingArea() )
        packageTuple = package.split('/')
        corsika_subdir = sharedArea() + '/' + packageTuple[0] + '/' + version
        # copy the shared-area installation into the working directory
        cmd = 'cp -r ' + corsika_subdir + '/* .'
        os.system(cmd)
        continue
    if workingArea:
      if checkSoftwarePackage( package, workingArea() )['OK']:
        DIRAC.gLogger.notice( 'Package found in Local Area:', package )
        continue
    if installSoftwarePackage( package, workingArea() )['OK']:
      ############## compile #############################
      # NOTE(review): versions outside the two sets below leave cmdTuple
      # undefined (NameError at systemCall) — confirm the supported versions.
      if version == 'clean_23012012':
        cmdTuple = ['./build_all','ultra','qgs2']
      elif version in ['prod-2_21122012','prod-2_08032013','prod-2_06052013']:
        cmdTuple = ['./build_all','prod2','qgs2']
      ret = systemCall( 0, cmdTuple, sendOutput)
      if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute build')
        DIRAC.exit( -1 )
      continue
    DIRAC.gLogger.error( 'Check Failed for software package:', package )
    DIRAC.gLogger.error( 'Software package not available')
    DIRAC.exit( -1 )
  # Run the Corsika application itself
  cs = CorsikaApp()
  cs.setSoftwarePackage(CorsikaSimtelPack)
  cs.csExe = executable
  cs.csArguments = ['--run-number',run_number,'--run',run,template]
  corsikaReturnCode = cs.execute()
  if corsikaReturnCode != 0:
    DIRAC.gLogger.error( 'Failed to execute corsika Application')
    jobReport.setApplicationStatus('Corsika Application: Failed')
    DIRAC.exit( -1 )
  ###### rename corsika file #################################
  rundir = 'run' + run_number
  # read the TELFIL keyword from the template to locate the produced file
  corsikaKEYWORDS = ['TELFIL']
  dictCorsikaKW = fileToKWDict(template,corsikaKEYWORDS)
  corsikafilename = rundir + '/' + dictCorsikaKW['TELFIL'][0]
  destcorsikafilename = 'corsika_run' + run_number + '.corsika.gz'
  cmd = 'mv ' + corsikafilename + ' ' + destcorsikafilename
  os.system(cmd)
  ### create corsika tar ####################
  corsika_tar = 'corsika_run' + run_number + '.tar.gz'
  filetar1 = rundir + '/'+'input'
  filetar2 = rundir + '/'+ 'DAT' + run_number + '.dbase'
  filetar3 = rundir + '/run' + str(int(run_number)) + '.log'
  cmdTuple = ['/bin/tar','zcf',corsika_tar, filetar1,filetar2,filetar3]
  DIRAC.gLogger.notice( 'Executing command tuple:', cmdTuple )
  ret = systemCall( 0, cmdTuple, sendOutput)
  if not ret['OK']:
    DIRAC.gLogger.error( 'Failed to execute tar')
    DIRAC.exit( -1 )
  DIRAC.exit()
def main(): global hostName global hostDN global hostProperties Script.registerSwitch('H:', 'HostName:', 'Name of the Host (Mandatory)', setHostName) Script.registerSwitch('D:', 'HostDN:', 'DN of the Host Certificate (Mandatory)', setHostDN) Script.registerSwitch( 'P:', 'Property:', 'Property to be added to the Host (Allow Multiple instances or None)', addProperty) Script.parseCommandLine(ignoreErrors=True) if hostName is None or hostDN is None: Script.showHelp(exitCode=1) args = Script.getPositionalArgs() from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin diracAdmin = DiracAdmin() exitCode = 0 errorList = [] hostProps = {'DN': hostDN} if hostProperties: hostProps['Properties'] = ', '.join(hostProperties) for prop in args: pl = prop.split("=") if len(pl) < 2: errorList.append( ("in arguments", "Property %s has to include a '=' to separate name from value" % prop)) exitCode = 255 else: pName = pl[0] pValue = "=".join(pl[1:]) Script.gLogger.info("Setting property %s to %s" % (pName, pValue)) hostProps[pName] = pValue if not diracAdmin.csModifyHost( hostName, hostProps, createIfNonExistant=True)['OK']: errorList.append(("add host", "Cannot register host %s" % hostName)) exitCode = 255 else: result = diracAdmin.csCommitChanges() if not result['OK']: errorList.append(("commit", result['Message'])) exitCode = 255 if exitCode == 0: from DIRAC.FrameworkSystem.Client.ComponentMonitoringClient import ComponentMonitoringClient cmc = ComponentMonitoringClient() ret = cmc.hostExists(dict(HostName=hostName)) if not ret['OK']: Script.gLogger.error( 'Cannot check if host is registered in ComponentMonitoring', ret['Message']) elif ret['Value']: Script.gLogger.info( 'Host already registered in ComponentMonitoring') else: ret = cmc.addHost(dict(HostName=hostName, CPU='TO_COME')) if not ret['OK']: Script.gLogger.error( 'Failed to add Host to ComponentMonitoring', ret['Message']) for error in errorList: Script.gLogger.error("%s: %s" % error) DIRAC.exit(exitCode)
def main():
    """Install the corsika_simhessarray package (copying it from the shared
    area, or compiling it locally) and then run a user-supplied executable
    inside the corsika environment.

    Positional args: [0] package version, [1:] command (and its arguments).
    """
    from DIRAC import gLogger
    from DIRAC.Core.Base import Script
    Script.parseCommandLine(ignoreErrors=True)
    from CTADIRAC.Core.Utilities.SoftwareInstallation import checkSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import localArea
    from CTADIRAC.Core.Utilities.SoftwareInstallation import sharedArea
    from CTADIRAC.Core.Utilities.SoftwareInstallation import getSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import workingArea
    from DIRAC.Core.Utilities.Subprocess import systemCall
    args = Script.getPositionalArgs()
    version = args[0]
    CorsikaSimtelPack = 'corsika_simhessarray/' + version + '/corsika_simhessarray'
    packs = [CorsikaSimtelPack]
    for package in packs:
        DIRAC.gLogger.notice('Checking:', package)
        # NOTE(review): 'sharedArea'/'workingArea' are imported function objects,
        # so these truthiness tests are always True — presumably sharedArea() was meant.
        if sharedArea:
            if checkSoftwarePackage(package, sharedArea())['OK']:
                DIRAC.gLogger.notice('Package found in Shared Area:', package)
                installSoftwareEnviron(package, workingArea())
                packageTuple = package.split('/')
                corsika_subdir = sharedArea(
                ) + '/' + packageTuple[0] + '/' + version
                # '-u' only copies files newer than what is already present
                cmd = 'cp -u -r ' + corsika_subdir + '/* .'
                os.system(cmd)
                continue
        if workingArea:
            if checkSoftwarePackage(package, workingArea())['OK']:
                DIRAC.gLogger.notice('Package found in Local Area:', package)
                continue
        if installSoftwarePackage(package, workingArea())['OK']:
            ############## compile #############################
            # NOTE(review): versions outside the two sets below leave cmdTuple
            # undefined (NameError at systemCall) — confirm supported versions.
            if version == 'clean_23012012':
                cmdTuple = ['./build_all', 'ultra', 'qgs2']
            elif version in [
                    'prod-2_21122012', 'prod-2_08032013', 'prod-2_06052013'
            ]:
                cmdTuple = ['./build_all', 'prod2', 'qgs2']
            ret = systemCall(0, cmdTuple, sendOutput)
            if not ret['OK']:
                DIRAC.gLogger.error('Failed to compile')
                DIRAC.exit(-1)
            continue
        DIRAC.gLogger.error('Check Failed for software package:', package)
        DIRAC.gLogger.error('Software package not available')
        DIRAC.exit(-1)
    # Build the environment of the installed package
    ret = getSoftwareEnviron(CorsikaSimtelPack)
    if not ret['OK']:
        error = ret['Message']
        DIRAC.gLogger.error(error, CorsikaSimtelPack)
        DIRAC.exit(-1)
    corsikaEnviron = ret['Value']
    # Make the user-supplied executable runnable, then run it with its args
    executable_file = args[1]
    cmd = 'chmod u+x ' + executable_file
    os.system(cmd)
    cmdTuple = args[1:]
    DIRAC.gLogger.notice('Executing command tuple:', cmdTuple)
    ret = systemCall(0, cmdTuple, sendOutput, env=corsikaEnviron)
    if not ret['OK']:
        DIRAC.gLogger.error('Failed to execute read_hess:', ret['Message'])
        DIRAC.exit(-1)
    status, stdout, stderr = ret['Value']
    if status:
        DIRAC.gLogger.error('read_hess execution reports Error:', status)
        DIRAC.gLogger.error(stdout)
        DIRAC.gLogger.error(stderr)
        DIRAC.exit(-1)
    DIRAC.exit()
def main(): from DIRAC.Core.Base.Script import parseCommandLine parseCommandLine() import DIRAC from DIRAC import gLogger args = Script.getPositionalArgs() requestName = None LFN = None PFN = None targetSE = None if len(args) != 4: Script.showHelp() else: requestName = args[0] LFN = args[1] PFN = args[2] targetSE = args[3] if not os.path.isabs(LFN): gLogger.error("LFN should be absolute path!!!") DIRAC.exit(-1) gLogger.info("will create request '%s' with 'PutAndRegister' " "operation using %s pfn and %s target SE" % (requestName, PFN, targetSE)) from DIRAC.RequestManagementSystem.Client.Request import Request from DIRAC.RequestManagementSystem.Client.Operation import Operation from DIRAC.RequestManagementSystem.Client.File import File from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient from DIRAC.Core.Utilities.Adler import fileAdler if not os.path.exists(PFN): gLogger.error("%s does not exist" % PFN) DIRAC.exit(-1) if not os.path.isfile(PFN): gLogger.error("%s is not a file" % PFN) DIRAC.exit(-1) PFN = os.path.abspath(PFN) size = os.path.getsize(PFN) adler32 = fileAdler(PFN) request = Request() request.RequestName = requestName putAndRegister = Operation() putAndRegister.Type = "PutAndRegister" putAndRegister.TargetSE = targetSE opFile = File() opFile.LFN = LFN opFile.PFN = PFN opFile.Size = size opFile.Checksum = adler32 opFile.ChecksumType = "ADLER32" putAndRegister.addFile(opFile) request.addOperation(putAndRegister) reqClient = ReqClient() putRequest = reqClient.putRequest(request) if not putRequest["OK"]: gLogger.error("unable to put request '%s': %s" % (requestName, putRequest["Message"])) DIRAC.exit(-1) gLogger.always("Request '%s' has been put to ReqDB for execution." % requestName) gLogger.always( "You can monitor its status using command: 'dirac-rms-request %s'" % requestName) DIRAC.exit(0)
def main():
    """CTA eventio_cta wrapper: install the HAP/HESS packages, convert the
    input file with eventio_cta, then quality-check the produced RAW file
    (step0 Open_Raw macro, step1 check_raw.csh script).
    """
    from DIRAC.Core.Base import Script
    #### eventio_cta options ##########################################
    Script.registerSwitch("I:", "infile=", "Input file", setInfile)
    Script.registerSwitch("O:", "outfile=", "Output file", setOutfile)
    Script.registerSwitch("T:", "tellist=", "Tellist", setTellist)
    Script.registerSwitch("F:", "Nfirst_mcevt=", "Nfirst_mcevt", setNfirst_mcevt)
    Script.registerSwitch("L:", "Nlast_mcevt=", "Nlast_mcevt", setNlast_mcevt)
    Script.registerSwitch("P:", "pixelslices=", "setPixelslices (true/false)", setPixelslices)
    ### other options ###############################################
    Script.registerSwitch("V:", "version=", "HAP version", setVersion)
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) < 1:
        Script.showHelp()
    if outfile == None or infile == None or tellist == None or version == None:
        Script.showHelp()
        # NOTE(review): 'jobReport' is only assigned further down — reaching
        # this line raises NameError instead of reporting the status; confirm.
        jobReport.setApplicationStatus('Options badly specified')
        DIRAC.exit(-1)
    from CTADIRAC.Core.Workflow.Modules.HapApplication import HapApplication
    from CTADIRAC.Core.Workflow.Modules.HapRootMacro import HapRootMacro
    from CTADIRAC.Core.Utilities.SoftwareInstallation import checkSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import getSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import localArea
    from CTADIRAC.Core.Utilities.SoftwareInstallation import sharedArea
    from DIRAC.Core.Utilities.Subprocess import systemCall
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
    # JOBID is exported by the DIRAC pilot environment
    jobID = os.environ['JOBID']
    jobID = int(jobID)
    jobReport = JobReport(jobID)
    HapPack = 'HAP/' + version + '/HAP'
    packs = ['HESS/v0.2/lib', 'HESS/v0.3/root', HapPack]
    # Locate each package in the shared or local area, installing if needed
    for package in packs:
        DIRAC.gLogger.notice('Checking:', package)
        # NOTE(review): 'sharedArea'/'localArea' are imported function objects,
        # so these truthiness tests are always True — presumably sharedArea()
        # was meant.
        if sharedArea:
            if checkSoftwarePackage(package, sharedArea())['OK']:
                DIRAC.gLogger.notice('Package found in Shared Area:', package)
                continue
        if localArea:
            if checkSoftwarePackage(package, localArea())['OK']:
                DIRAC.gLogger.notice('Package found in Local Area:', package)
                continue
        if installSoftwarePackage(package, localArea())['OK']:
            continue
        DIRAC.gLogger.error('Check Failed for software package:', package)
        DIRAC.gLogger.error('Software package not available')
        DIRAC.exit(-1)
    # Telescope configuration shipped with the HAP package
    telconf = os.path.join(localArea(), 'HAP/%s/config/%s' % (version, tellist))
    ha = HapApplication()
    ha.setSoftwarePackage(HapPack)
    ha.hapExecutable = 'eventio_cta'
    ha.hapArguments = ['-file', infile, '-o', outfile, '-tellist', telconf]
    # The optional switches leave their globals undefined when unused,
    # hence the NameError-based feature detection below.
    try:
        ha.hapArguments.extend(
            ['-Nfirst_mcevt', Nfirst_mcevt, '-Nlast_mcevt', Nlast_mcevt])
    except NameError:
        DIRAC.gLogger.info('Nfirst_mcevt/Nlast_mcevt options are not used')
    try:
        if (pixelslices == 'true'):
            ha.hapArguments.extend(['-pixelslices'])
    except NameError:
        DIRAC.gLogger.info('pixelslices option is not used')
    DIRAC.gLogger.notice('Executing Hap Converter Application')
    res = ha.execute()
    if not res['OK']:
        DIRAC.gLogger.error('Failed to execute eventio_cta Application')
        jobReport.setApplicationStatus('eventio_cta: Failed')
        DIRAC.exit(-1)
    if not os.path.isfile(outfile):
        error = 'raw file was not created:'
        DIRAC.gLogger.error(error, outfile)
        jobReport.setApplicationStatus('eventio_cta: RawData not created')
        DIRAC.exit(-1)
    ###################### Check RAW DATA #######################
    hr = HapRootMacro()
    hr.setSoftwarePackage(HapPack)
    DIRAC.gLogger.notice('Executing RAW check step0')
    hr.rootMacro = '/hapscripts/dst/Open_Raw.C+'
    # The macro expects the file name quoted inside the argument string
    outfilestr = '"' + outfile + '"'
    args = [outfilestr]
    DIRAC.gLogger.notice('Open_Raw macro Arguments:', args)
    hr.rootArguments = args
    DIRAC.gLogger.notice('Executing Hap Open_Raw macro')
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error('Open_Raw: Failed')
        DIRAC.exit(-1)
    #########################Quality Check for raw Output File: step1####################
    DIRAC.gLogger.notice('Executing Raw Check step1')
    ret = getSoftwareEnviron(HapPack)
    if not ret['OK']:
        error = ret['Message']
        DIRAC.gLogger.error(error, HapPack)
        DIRAC.exit(-1)
    hapEnviron = ret['Value']
    hessroot = hapEnviron['HESSROOT']
    check_script = hessroot + '/hapscripts/dst/check_raw.csh'
    cmdTuple = [check_script]
    ret = systemCall(0, cmdTuple, sendOutput)
    if not ret['OK']:
        DIRAC.gLogger.error('Failed to execute RAW Check step1')
        jobReport.setApplicationStatus('Check_raw: Failed')
        DIRAC.exit(-1)
    status, stdout, stderr = ret['Value']
    if status == 1:
        jobReport.setApplicationStatus(
            'RAW Check step1: Big problem during RAW production')
        DIRAC.gLogger.error('Check_raw: Big problem during RAW production')
        DIRAC.exit(-1)
    DIRAC.exit()
def main():
    """Check which resolved queues match a JDL job description and print the
    result as a table (Site / CE / Queue / Status / Match / Reason).
    """
    global fullMatch
    global sites
    Script.registerSwitch("F", "full-match", "Check all the matching criteria", setFullMatch)
    Script.registerSwitch(
        "S:", "site=", "Check matching for these sites (comma separated list)", setSites)
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    if len(args) == 0:
        gLogger.error("Error: No job description provided")
        Script.showHelp(exitCode=1)
    from DIRAC.Core.Security.ProxyInfo import getVOfromProxyGroup
    from DIRAC.ConfigurationSystem.Client.Helpers import Resources
    from DIRAC.Core.Utilities.PrettyPrint import printTable
    from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus
    from DIRAC.ResourceStatusSystem.Client.SiteStatus import SiteStatus
    from DIRAC.WorkloadManagementSystem.Utilities.QueueUtilities import getQueuesResolved, matchQueue
    with open(args[0]) as f:
        jdl = f.read()
    # Get the current VO
    result = getVOfromProxyGroup()
    if not result['OK']:
        gLogger.error('No proxy found, please login')
        DIRACExit(-1)
    voName = result['Value']
    resultQueues = Resources.getQueues(siteList=sites, community=voName)
    if not resultQueues['OK']:
        gLogger.error('Failed to get CE information')
        DIRACExit(-1)
    siteDict = resultQueues['Value']
    result = getQueuesResolved(siteDict)
    # BUGFIX: this used to re-test resultQueues['OK'], so a failure of
    # getQueuesResolved was silently ignored and result['Value'] blew up below.
    if not result['OK']:
        gLogger.error('Failed to get CE information')
        DIRACExit(-1)
    queueDict = result['Value']
    # get list of usable sites within this cycle
    resultMask = SiteStatus().getUsableSites()
    if not resultMask['OK']:
        gLogger.error('Failed to get Site mask information')
        DIRACExit(-1)
    siteMaskList = resultMask.get('Value', [])
    rssClient = ResourceStatus()
    fields = ('Site', 'CE', 'Queue', 'Status', 'Match', 'Reason')
    records = []
    for queue, queueInfo in queueDict.items():
        site = queueInfo['Site']
        ce = queueInfo['CEName']
        siteStatus = "Active" if site in siteMaskList else "InActive"
        ceStatus = siteStatus
        # Refine the CE status from RSS when RSS is enabled
        if rssClient.rssFlag:
            result = rssClient.getElementStatus(ce, "ComputingElement")
            if result['OK']:
                ceStatus = result['Value'][ce]['all']
        result = matchQueue(jdl, queueInfo, fullMatch=fullMatch)
        if not result['OK']:
            gLogger.error('Failed in getting match data', result['Message'])
            DIRACExit(-1)
        status = "Active" if siteStatus == "Active" and ceStatus == "Active" else "Inactive"
        if result['Value']['Match']:
            records.append((site, ce, queueInfo['Queue'], status, 'Yes', ''))
        else:
            records.append((site, ce, queueInfo['Queue'], status, 'No',
                            result['Value']['Reason']))
    gLogger.notice(
        printTable(fields, records, sortField='Site', columnSeparator='   ', printOut=False))
#!/usr/bin/env python #-*- coding:utf-8 -*- """ List the Files in the Dataset (in DFC) """ import os os.environ['TERM'] = 'linux' import DIRAC from DIRAC.Core.Base import Script Script.parseCommandLine(ignoreErrors=True) datasets = Script.getPositionalArgs() if len(datasets)==0: DIRAC.exit(-1) from BESDIRAC.Badger.API.Badger import Badger badger = Badger() for ds in datasets: result = badger.getFilesByDatasetName(ds) if result and isinstance(result, list): for lfn in result: print lfn
def main():
    """Grep downloaded job output sandboxes for a string.

    Selects jobs by the given switches, downloads each job's output sandbox,
    searches `filename` (default std.out) for the positional search string and
    prints the matching (jobID, line) pairs.
    """
    Script.registerSwitch("", "Status=", "Primary status")
    Script.registerSwitch("", "MinorStatus=", "Secondary status")
    Script.registerSwitch("", "ApplicationStatus=", "Application status")
    Script.registerSwitch("", "Site=", "Execution site")
    Script.registerSwitch("", "Owner=", "Owner (DIRAC nickname)")
    Script.registerSwitch("", "JobGroup=", "Select jobs for specified job group")
    Script.registerSwitch(
        "", "Date=", "Date in YYYY-MM-DD format, if not specified default is today")
    Script.registerSwitch("", "File=", "File name,if not specified default is std.out ")
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()
    # Default values
    status = None
    minorStatus = None
    appStatus = None
    site = None
    owner = None
    jobGroup = None
    date = None
    filename = 'std.out'
    if len(args) != 1:
        # NOTE(review): no explicit exit here — relies on Script.showHelp()
        # terminating the process; confirm against the DIRAC version in use.
        Script.showHelp()
    searchstring = str(args[0])
    for switch in Script.getUnprocessedSwitches():
        if switch[0].lower() == "status":
            status = switch[1]
        elif switch[0].lower() == "minorstatus":
            minorStatus = switch[1]
        elif switch[0].lower() == "applicationstatus":
            appStatus = switch[1]
        elif switch[0].lower() == "site":
            site = switch[1]
        elif switch[0].lower() == "owner":
            owner = switch[1]
        elif switch[0].lower() == "jobgroup":
            jobGroup = switch[1]
        elif switch[0].lower() == "date":
            date = switch[1]
        elif switch[0].lower() == "file":
            filename = switch[1]
    selDate = date
    if not date:
        selDate = 'Today'
    from DIRAC.Interfaces.API.Dirac import Dirac
    dirac = Dirac()
    exitCode = 0
    errorList = []
    resultDict = {}
    # Select the candidate jobs matching all given criteria
    result = dirac.selectJobs(status=status,
                              minorStatus=minorStatus,
                              applicationStatus=appStatus,
                              site=site,
                              owner=owner,
                              jobGroup=jobGroup,
                              date=date)
    if result['OK']:
        jobs = result['Value']
    else:
        print("Error in selectJob", result['Message'])
        DIRAC.exit(2)
    for job in jobs:
        # Download the sandbox into a directory named after the job ID
        result = dirac.getOutputSandbox(job)
        if result['OK']:
            if os.path.exists('%s' % job):
                lines = []
                try:
                    lines = open(os.path.join(job, filename)).readlines()
                except Exception as x:
                    errorList.append((job, x))
                for line in lines:
                    if line.count(searchstring):
                        resultDict[job] = line
                # NOTE(review): rmtree is expected to come from a
                # 'from shutil import rmtree' outside this chunk — confirm.
                rmtree("%s" % (job))
        else:
            errorList.append((job, result['Message']))
            exitCode = 2
    for result in resultDict.items():
        print(result)
    DIRAC.exit(exitCode)
import pprint import sys import urlparse import cgi from DIRAC import gLogger from DIRAC.Core.Base import Script from DIRAC.Core.Utilities.Plotting.FileCoding import extractRequestFromFileId Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1], 'Usage:', ' %s [option|cfgfile] ... URL ...' % Script.scriptName, 'Arguments:', ' URL: encoded URL of a DIRAC Accounting plot'] ) ) Script.parseCommandLine() fileIds = Script.getPositionalArgs() for fileId in fileIds: #Try to find if it's a url parseRes = urlparse.urlparse( fileId ) if parseRes.query: queryRes = cgi.parse_qs( parseRes.query ) if 'file' in queryRes: fileId = queryRes[ 'file' ][0] #Decode result = extractRequestFromFileId( fileId ) if not result[ 'OK' ]: gLogger.error( "Could not decode fileId", "'%s', error was %s" % ( fileId, result[ 'Message' ] ) ) sys.exit( 1 ) gLogger.notice( "Decode for '%s' is:\n%s" % ( fileId, pprint.pformat( result[ 'Value' ] ) ) )
_helper_create_query = _helper_fc_cli._FileCatalogClientCLI__createQuery def getLFNsByQueryString(query): metadataDict = _helper_create_query(query) lfns = _client.findFilesByMetadata(metadataDict,'/') if not lfns["OK"]: import sys sys.stderr.write(repr(lfns)) return list() return lfns["Value"] query_string = " ".join(Script.getPositionalArgs()) def createQuery(query): def yieldAll(query): for s in map(lambda x:x.strip(), query.split()): curop = None for op in ['>=','<=','>','<','!=','=']: pos = s.find(op) if pos == -1: continue curop = op if s[:pos]: yield s[:pos] if s[pos: pos+len(curop)]: yield s[pos: pos+len(curop)]
if not res['OK']: gLogger.error("Failed to get transformation problematic files", res['Message']) return S_ERROR() problematicFiles = res['Value'] if not problematicFiles: gLogger.notice("No problematic files found for transformation") return S_OK() for lfn in sortList(problematicFiles.keys()): prognosis = problematicFiles[lfn]['Prognosis'] problematicDict = problematicFiles[lfn] gLogger.notice("Prognosis is %s" % prognosis ) if not hasattr( integrityClient, methodToCall ): gLogger.notice( "DataIntegrityClient hasn't got '%s' member" % methodToCall ) continue fcn = getattr( integrityClient, methodToCall ) if not callable( fcn ): gLogger.notice( "DataIntegrityClient member '%s' isn't a method" % methodToCall ) continue ## results not checked??? Where is The Food? res = fcn( problematicDict ) gLogger.notice("Problematic files resolved for transformation %d" % transID) return S_OK() transIDs = [ int(x) for x in Script.getPositionalArgs() ] if not transIDs: gLogger.notice("Please supply transformationIDs as arguments") DIRAC.exit(0) for transID in transIDs: resolveTransforamtionProblematics(transID)
def __getTask(transID, taskID): res = transClient.getTransformationTasks({ 'TransformationID': transID, "TaskID": taskID }) if not res['OK'] or not res['Value']: return None return res['Value'][0] #==================================== if __name__ == "__main__": Script.parseCommandLine(ignoreErrors=True) transList = __getTransformations(Script.getPositionalArgs()) from LHCbDIRAC.TransformationSystem.Client.TransformationClient import TransformationClient from DIRAC import gLogger, exit transClient = TransformationClient() for transID in transList: res = transClient.getTransformationFiles({ 'TransformationID': transID, 'Status': 'Assigned' }) if not res['OK']: gLogger.fatal("Error getting transformation files for %d" % transID) continue targetStats = {}
from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient from LHCbDIRAC.BookkeepingSystem.Client.BKQuery import BKQuery Script.registerSwitch('', 'FileType=', 'FileType to search [ALLSTREAMS.DST]') Script.setUsageMessage('\n'.join([ __doc__.split('\n')[1], 'Usage:', ' %s [option] eventType ' % Script.scriptName ])) fileType = 'ALLSTREAMS.DST' Script.parseCommandLine(ignoreErrors=True) for switch in Script.getUnprocessedSwitches(): if switch[0] == "FileType": fileType = str(switch[1]) eventTypes = Script.getPositionalArgs()[0] bkQuery = BKQuery({ 'EventType': eventTypes, "ConfigName": "MC" }, fileTypes=fileType, visible=True) print "bkQuery:", bkQuery prods = bkQuery.getBKProductions() bk = BookkeepingClient() for prod in prods: res = bk.getProductionInformations(prod) if res['OK']: value = res['Value']
def main():
    """Run the eventio_cta conversion and DST production chain for one HAP job.

    Relies on module-level option callbacks (setTellist, setVersion,
    setRunNumber, ...) having populated the module globals ``tellist``,
    ``version``, ``run_number``, ``part_type`` and, optionally,
    ``Nfirst_mcevt``/``Nlast_mcevt``/``pixelslices``/``nevent``.
    Exits the process via DIRAC.exit on any failure; reports progress
    through the DIRAC JobReport.
    """
    from DIRAC.Core.Base import Script
    #### eventio_cta options ##########################################
    Script.registerSwitch( "T:", "tellist=", "Tellist", setTellist )
    Script.registerSwitch( "F:", "Nfirst_mcevt=", "Nfirst_mcevt", setNfirst_mcevt)
    Script.registerSwitch( "L:", "Nlast_mcevt=", "Nlast_mcevt", setNlast_mcevt)
    ## add other eventio_cta options ################################
    # Script.registerSwitch( "N:", "num=", "Num", setNum)
    ## Script.registerSwitch( "L:", "limitmc=", "Limitmc", setLimitmc)
    # Script.registerSwitch( "S:", "telidoffset=", "Telidoffset", setTelidoffset)
    Script.registerSwitch( "P:", "pixelslices=", "setPixelslices (true/false)",setPixelslices)
    Script.registerSwitch( "p:", "run_number=", "Run Number (set automatically)", setRunNumber )
    ### other options ###############################################
    Script.registerSwitch( "V:", "version=", "HAP version", setVersion )
    Script.parseCommandLine( ignoreErrors = True )
    args = Script.getPositionalArgs()
    if len( args ) < 1:
        Script.showHelp()
    if tellist == None or version == None:
        Script.showHelp()
        # NOTE(review): jobReport is only assigned further down in this
        # function — this call would raise NameError when options are
        # missing; confirm the intended ordering.
        jobReport.setApplicationStatus('Options badly specified')
        DIRAC.exit( -1 )
    # Project-local imports, deferred until after option parsing
    from CTADIRAC.Core.Workflow.Modules.HapApplication import HapApplication
    from CTADIRAC.Core.Workflow.Modules.HapRootMacro import HapRootMacro
    from CTADIRAC.Core.Utilities.SoftwareInstallation import checkSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import installSoftwarePackage
    from CTADIRAC.Core.Utilities.SoftwareInstallation import getSoftwareEnviron
    from CTADIRAC.Core.Utilities.SoftwareInstallation import localArea
    from CTADIRAC.Core.Utilities.SoftwareInstallation import sharedArea
    from DIRAC.Core.Utilities.Subprocess import systemCall
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
    # Job identity comes from the worker-node environment
    jobID = os.environ['JOBID']
    jobID = int( jobID )
    jobReport = JobReport( jobID )
    # Software packages required by this job
    HapPack = 'HAP/' + version + '/HAP'
    packs = ['HESS/v0.2/lib','HESS/v0.3/root',HapPack]
    for package in packs:
        DIRAC.gLogger.notice( 'Checking:', package )
        # Prefer an existing install in the shared area, then the local
        # area, and only install into the local area as a last resort.
        if sharedArea:
            if checkSoftwarePackage( package, sharedArea() )['OK']:
                DIRAC.gLogger.notice( 'Package found in Shared Area:', package )
                continue
        if localArea:
            if checkSoftwarePackage( package, localArea() )['OK']:
                DIRAC.gLogger.notice( 'Package found in Local Area:', package )
                continue
            if installSoftwarePackage( package, localArea() )['OK']:
                continue
        DIRAC.gLogger.error( 'Check Failed for software package:', package )
        DIRAC.gLogger.error( 'Software package not available')
        DIRAC.exit( -1 )
    telconf = os.path.join( localArea(),'HAP/%s/config/%s' % (version,tellist))
    ############ eventio_cta conversion step ############
    ha = HapApplication()
    ha.setSoftwarePackage(HapPack)
    ha.hapExecutable = 'eventio_cta'
    fileout = 'raw_' + part_type + '_run' + run_number + '.root'
    # build_infile is presumably defined elsewhere in this file
    infile = build_infile()
    ha.hapArguments = ['-file', infile, '-o', fileout, '-tellist', telconf]
    # Optional arguments: the corresponding globals only exist if the user
    # passed the switch, hence the NameError-based probing.
    try:
        ha.hapArguments.extend(['-Nfirst_mcevt', Nfirst_mcevt, '-Nlast_mcevt', Nlast_mcevt])
    except NameError:
        DIRAC.gLogger.info( 'Nfirst_mcevt/Nlast_mcevt options are not used' )
    try:
        if(pixelslices == 'true'):
            ha.hapArguments.extend(['-pixelslices'])
    except NameError:
        DIRAC.gLogger.info( 'pixelslices option is not used' )
    DIRAC.gLogger.notice( 'Executing Hap Converter Application' )
    res = ha.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Failed to execute eventio_cta Application')
        jobReport.setApplicationStatus('eventio_cta: Failed')
        DIRAC.exit( -1 )
    if not os.path.isfile(fileout):
        error = 'raw file was not created:'
        DIRAC.gLogger.error( error, fileout )
        jobReport.setApplicationStatus('eventio_cta: RawData not created')
        DIRAC.exit( -1 )
    ###################### Check RAW DATA #######################
    hr = HapRootMacro()
    hr.setSoftwarePackage(HapPack)
    DIRAC.gLogger.notice('Executing RAW check step0')
    hr.rootMacro = '/hapscripts/dst/Open_Raw.C+'
    # ROOT macro string arguments must be wrapped in literal quotes
    outfilestr = '"' + fileout + '"'
    args = [outfilestr]
    DIRAC.gLogger.notice( 'Open_Raw macro Arguments:', args )
    hr.rootArguments = args
    DIRAC.gLogger.notice( 'Executing Hap Open_Raw macro')
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Open_Raw: Failed' )
        DIRAC.exit( -1 )
    #################Check stdout of 'Open_Raw.C macro ###############################
    DIRAC.gLogger.notice('Executing Raw Check step1')
    ret = getSoftwareEnviron(HapPack)
    if not ret['OK']:
        error = ret['Message']
        DIRAC.gLogger.error( error, HapPack)
        DIRAC.exit( -1 )
    hapEnviron = ret['Value']
    hessroot = hapEnviron['HESSROOT']
    check_script = hessroot + '/hapscripts/dst/check_raw.csh'
    cmdTuple = [check_script]
    # sendOutput is presumably a module-level output callback — not visible here
    ret = systemCall( 0, cmdTuple, sendOutput)
    if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute RAW Check step1')
        jobReport.setApplicationStatus('Check_raw: Failed')
        DIRAC.exit( -1 )
    status, stdout, stderr = ret['Value']
    # Exit status 1 from the check script signals a fatal RAW problem
    if status==1:
        jobReport.setApplicationStatus('RAW Check step1: Big problem during RAW production')
        DIRAC.gLogger.error( 'Check_raw: Big problem during RAW production' )
        DIRAC.exit( -1 )
    ############## DST production #######################
    hr = HapRootMacro()
    hr.setSoftwarePackage(HapPack)
    infile = build_infile()
    infilestr = '"' + fileout + '"'
    telconfstr = '"' + telconf + '"'
    args = [str(int(run_number)), infilestr, telconfstr]
    # nevent is only defined if the corresponding option machinery set it
    try:
        args.extend([nevent])
    except NameError:
        DIRAC.gLogger.info( 'nevent arg not used' )
    DIRAC.gLogger.notice( 'make_CTA_DST macro Arguments:', args )
    hr.rootMacro = '/hapscripts/dst/make_CTA_DST.C+'
    hr.rootArguments = args
    DIRAC.gLogger.notice( 'Executing Hap make_CTA_DST macro' )
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Failed to execute make_CTA_DST macro')
        jobReport.setApplicationStatus('Failure during make_CTA_DST')
        DIRAC.exit( -1 )
    ############ check existance of output file ####
    filedst = 'dst_CTA_%08d' % int(run_number) + '.root'
    if not os.path.isfile(filedst):
        DIRAC.gLogger.error('dst file not found:', filedst )
        jobReport.setApplicationStatus('make_CTA_DST.C: DST file not created')
        DIRAC.exit( -1 )
    # Rename the macro's fixed output name to the conventional one
    fileout = 'dst_' + part_type + '_run' + run_number + '.root'
    cmd = 'mv ' + filedst + ' ' + fileout
    os.system(cmd)
    #####################Check stdout ###########################
    DIRAC.gLogger.notice('Executing DST Check step0')
    check_script = hessroot + '/hapscripts/dst/check_dst0.csh'
    cmdTuple = [check_script]
    ret = systemCall( 0, cmdTuple, sendOutput)
    if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute DST Check step0')
        jobReport.setApplicationStatus('Check_dst0: Failed')
        DIRAC.exit( -1 )
    status, stdout, stderr = ret['Value']
    if status==1:
        jobReport.setApplicationStatus('Check_dst0: Big problem during the DST production')
        DIRAC.gLogger.error( 'DST Check step0 reports: Big problem during the DST production' )
        DIRAC.exit( -1 )
    # Status 2 is "no triggered events": not an error, exit successfully
    if status==2:
        jobReport.setApplicationStatus('Check_dst0: No triggered events')
        DIRAC.gLogger.notice( 'DST Check step0 reports: No triggered events' )
        DIRAC.exit( )
    ############# run the CheckDST macro #################
    DIRAC.gLogger.notice('Executing DST check step1')
    hr.rootMacro = '/hapscripts/dst/CheckDST.C+'
    fileoutstr = '"' + fileout + '"'
    args = [fileoutstr]
    DIRAC.gLogger.notice( 'CheckDST macro Arguments:', args )
    hr.rootArguments = args
    DIRAC.gLogger.notice( 'Executing Hap CheckDST macro')
    res = hr.execute()
    if not res['OK']:
        DIRAC.gLogger.error( 'Failure during DST Check step1' )
        jobReport.setApplicationStatus('Check_dst1: Failed')
        DIRAC.exit( -1 )
    #######################Check stdout of CheckDST.C macro ##########################
    DIRAC.gLogger.notice('Executing DST Check step2')
    check_script = hessroot + '/hapscripts/dst/check_dst2.csh'
    cmdTuple = [check_script]
    ret = systemCall( 0, cmdTuple, sendOutput )
    if not ret['OK']:
        DIRAC.gLogger.error( 'Failed to execute DST Check step2')
        jobReport.setApplicationStatus('Check_dst2: Failed')
        DIRAC.exit( -1 )
    status, stdout, stderr = ret['Value']
    if status==1:
        jobReport.setApplicationStatus('DST Check step2: Big problem during the DST production')
        DIRAC.gLogger.error( 'DST Check step2 reports: Big problem during the DST production' )
        DIRAC.exit( -1 )
    if status==2:
        jobReport.setApplicationStatus('DST Check step2: No triggered events')
        DIRAC.gLogger.notice( 'DST Check step2 reports: No triggered events' )
        DIRAC.exit( )
    DIRAC.exit()
# Stand-alone script: decode DIRAC Accounting plot URLs / file IDs back into
# the plot request they encode.  Python 2 (urlparse, cgi.parse_qs).
import pprint
import sys
import urlparse
import cgi
from DIRAC import gLogger
from DIRAC.Core.Base import Script
from DIRAC.AccountingSystem.private.FileCoding import extractRequestFromFileId

Script.setUsageMessage('\n'.join([ __doc__.split('\n')[1], 'Usage', ' %s [option|cfgfile] ... URL ...' % Script.scriptName, 'Arguments:', ' URL: encoded URL of a DIRAC Accounting plot' ])) if False else Script.setUsageMessage('\n'.join([ __doc__.split('\n')[1], 'Usage:', ' %s [option|cfgfile] ... URL ...' % Script.scriptName, 'Arguments:', ' URL: encoded URL of a DIRAC Accounting plot' ]))
Script.parseCommandLine()
fileIds = Script.getPositionalArgs()
for fileId in fileIds:
    #Try to find if it's a url
    parseRes = urlparse.urlparse(fileId)
    if parseRes.query:
        # If the argument is a full plot URL, extract the 'file' query field
        queryRes = cgi.parse_qs(parseRes.query)
        if 'file' in queryRes:
            fileId = queryRes['file'][0]
    #Decode
    result = extractRequestFromFileId(fileId)
    if not result['OK']:
        gLogger.error("Could not decode fileId", "'%s', error was %s" % (fileId, result['Message']))
        sys.exit(1)
    # NOTE(review): the final statement is truncated in the visible source —
    # the format arguments for this notice are missing.
    gLogger.notice("Decode for '%s' is:\n%s" %
def main():
    """Select jobs by status/site/owner/group/date switches and print a summary.

    All selection criteria come from command-line switches; positional
    arguments are rejected.  At most ``Maximum`` job IDs are printed
    (default 100, 0 means no limit).  Exits the process with code 0.
    """
    maxJobs = 100
    Script.registerSwitch("", "Status=", "Primary status")
    Script.registerSwitch("", "MinorStatus=", "Secondary status")
    Script.registerSwitch("", "ApplicationStatus=", "Application status")
    Script.registerSwitch("", "Site=", "Execution site")
    Script.registerSwitch("", "Owner=", "Owner (DIRAC nickname)")
    Script.registerSwitch("", "JobGroup=", "Select jobs for specified job group")
    Script.registerSwitch(
        "", "Date=",
        "Date in YYYY-MM-DD format, if not specified default is today")
    Script.registerSwitch(
        "", "Maximum=",
        "Maximum number of jobs shown (default %d, 0 means all)" % maxJobs)
    Script.parseCommandLine(ignoreErrors=True)
    args = Script.getPositionalArgs()

    # Default values
    status = None
    minorStatus = None
    appStatus = None
    site = None
    owner = None
    jobGroups = []
    date = None

    # This script is switch-driven only: any positional argument is an error.
    if args:
        Script.showHelp()

    exitCode = 0

    for switch in Script.getUnprocessedSwitches():
        if switch[0].lower() == "status":
            status = switch[1]
        elif switch[0].lower() == "minorstatus":
            minorStatus = switch[1]
        elif switch[0].lower() == "applicationstatus":
            appStatus = switch[1]
        elif switch[0].lower() == "site":
            site = switch[1]
        elif switch[0].lower() == "owner":
            owner = switch[1]
        elif switch[0].lower() == "jobgroup":
            # Numeric groups are zero-padded to the canonical 8-digit form
            for jg in switch[1].split(','):
                if jg.isdigit():
                    jobGroups.append('%08d' % int(jg))
                else:
                    jobGroups.append(jg)
        elif switch[0].lower() == "date":
            date = switch[1]
        # BUGFIX: compare case-insensitively like every other switch above
        elif switch[0].lower() == "maximum":
            try:
                maxJobs = int(switch[1])
            # BUGFIX: int() of a malformed string raises ValueError, not
            # TypeError, so the bad-value path was previously never taken.
            except (TypeError, ValueError):
                gLogger.fatal("Invalid max number of jobs", switch[1])
                DIRAC.exit(1)

    selDate = date
    if not date:
        selDate = 'Today'
    # Summary of the selection used for the final report message.
    # BUGFIX: 'Site' was missing from the reported conditions even though it
    # is part of the actual selection.
    conditions = {
        'Status': status,
        'MinorStatus': minorStatus,
        'ApplicationStatus': appStatus,
        'Site': site,
        'Owner': owner,
        'JobGroup': ','.join(str(jg) for jg in jobGroups),
        'Date': selDate
    }

    from DIRAC.Interfaces.API.Dirac import Dirac
    dirac = Dirac()
    jobs = []

    if jobGroups:
        # One query per group; a failing group is reported but does not abort.
        for jobGroup in jobGroups:
            res = dirac.selectJobs(status=status, minorStatus=minorStatus,
                                   applicationStatus=appStatus, site=site,
                                   owner=owner, jobGroup=jobGroup, date=date,
                                   printErrors=False)
            if res['OK']:
                jobs.extend(res['Value'])
            else:
                gLogger.error("Can't select jobs: ", res['Message'])
    else:
        res = dirac.selectJobs(status=status, minorStatus=minorStatus,
                               applicationStatus=appStatus, site=site,
                               owner=owner, date=date, printErrors=False)
        if res['OK']:
            jobs.extend(res['Value'])
        else:
            gLogger.error("Can't select jobs: ", res['Message'])

    conds = ['%s = %s' % (n, v) for n, v in conditions.items() if v]
    if maxJobs and len(jobs) > maxJobs:
        jobs = jobs[:maxJobs]
        constrained = ' (first %d shown) ' % maxJobs
    else:
        constrained = ' '

    if jobs:
        gLogger.notice(
            '==> Selected %s jobs%swith conditions: %s\n%s'
            % (len(jobs), constrained, ', '.join(conds), ','.join(jobs)))
    else:
        gLogger.notice('No jobs were selected with conditions:', ', '.join(conds))

    DIRAC.exit(exitCode)
# Stand-alone script: delete tasks (and all jobs belonging to them) by TaskID.
from DIRAC.Core.Base import Script
# Usage banner shown by --help
Script.setUsageMessage( """ Delete task and all jobs in the task Usage: %s [option] ... [TaskID] ... """ % Script.scriptName )
Script.parseCommandLine(ignoreErrors=False)
options = Script.getUnprocessedSwitches()
args = Script.getPositionalArgs()
from BESDIRAC.WorkloadManagementSystem.Client.TaskClient import TaskClient
taskClient = TaskClient()

def deleteTask(taskID):
    """Delete one task via the module-level TaskClient and print the outcome."""
    result = taskClient.deleteTask(taskID)
    if not result["OK"]:
        print "Delete task error: %s" % result["Message"]
        return
    print "Task %s deleted" % taskID

# NOTE(review): the body of main() is not visible in this chunk of the file.
def main():
""" download a set of files as a dataset from SE to the current directory """ __RCSID__ = "$Id$" import DIRAC from DIRAC.Core.Base import Script Script.registerSwitch("s","datasetName","the dataset you want to download") Script.setUsageMessage('\n'.join([__doc__, 'Usage:', '%s dir'% Script.scriptName, 'Arguments:' ' datasetName: the dataset you want to download'])) Script.parseCommandLine(ignoreErrors=True) datasetName = Script.getPositionalArgs() #print dir if len(datasetName)!=1: Script.showHelp() from BESDIRAC.Badger.API.Badger import Badger badger = Badger() exitCode = 0 datasetName = datasetName[0] result = badger.dowloadFileByDatasetName(datasetName) if not result: print 'ERROR %s'%(result['Message']) exitCode = 1 DIRAC.exit(exitCode)