def parallelWorkflow():
    pid = str(os.getpid())
    import downloader
    maxParallelItem = 10
    sleepTimeForWaitingChilds = 5
    childs = list()
    previousMonitor = dict()
    previousMonitor['failed'] = list()
    previousMonitor['ok'] = list()
    q = libQueue.queue()
    condition = "STATUS !='%s' and STATUS !='%s' and PID is null and queue.LAST_UPDATE <(now() - INTERVAL 10 SECOND)" % (ccatalogued, cnok)
    qItemList = q.search(condition)
    for qItem in qItemList:
        currMonitor = downloader.monitorChilds(childs)
        #wait for a free resource
        while (currMonitor['nRun'] >= maxParallelItem):
            print 'MAIN: waiting for childs: ' + str(currMonitor['running'])
            time.sleep(sleepTimeForWaitingChilds)
            currMonitor = downloader.monitorChilds(childs)
        #print the result of the last released resource
        #compare succeeded processes
        for status in ['ok', 'failed']:
            new = currMonitor[status]
            newset = set(new)
            diff = newset.difference(previousMonitor[status])
            for i in diff:
                print "MAIN: Completed " + status + " process " + str(i)
        previousMonitor = currMonitor
        try:
            part = re.search(r'\d{8}T\d{6}', qItem).group()[2:8]
        except:
            part = '000000'
        logfolder = "%s/log/prod/%s" % (prjFolder, part)
        if not os.path.exists(logfolder):
            os.makedirs(logfolder, 0777)
        logfile = "%s/%s.log" % (logfolder, qItem)
        cmd = pythonex + " %s/lib/libQueue.py --id %s 1>>%s 2>>%s" % (prjFolder, qItem, logfile, logfile)
        print cmd
        newProc = subprocess.Popen(['/bin/sh', '-c', cmd])
        proc = dict()
        proc['proc'] = newProc
        proc['id'] = qItem  #fixed: was the builtin 'id'; store the queue item identifier
        childs.append(proc)
    #Wait until all download subprocesses are completed
    noErrorFound = True
    for proc in childs:
        exitCode = proc['proc'].wait()
        if exitCode != 0:
            print "Workflow for product %s failed" % proc['id']
            noErrorFound = False
    print noErrorFound
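# monitorChilds() is defined in the downloader module and is not shown in this
# section. The sketch below is only an assumption inferred from how
# parallelWorkflow() (and the downloader main() further down) consume its
# result: a dict with the count of running children plus lists of running,
# succeeded, and failed process IDs. It is not the project's implementation.
def monitorChilds(childs):
    monitor = {'nRun': 0, 'running': [], 'ok': [], 'failed': []}
    for child in childs:
        exitCode = child['proc'].poll()  #None while the subprocess is still alive
        if exitCode is None:
            monitor['nRun'] += 1
            monitor['running'].append(child['proc'].pid)
        elif exitCode == 0:
            monitor['ok'].append(child['proc'].pid)
        else:
            monitor['failed'].append(child['proc'].pid)
    return monitor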
def getAllMetalinks(self):
    x = libQueue.queue()
    while (True):
        y = x.getItemForMetalinkDownload(str(os.getpid()))
        if y == '#':
            #no record found
            break
        try:
            self.getMetalink(y)
        except:
            pass
def testworkflow():
    #check DB connection
    q = libQueue.queue()
    del q
    targets = dbif.getTargetList("type='dhus'")
    for itarget in targets:
        x = gmpPluginDhus(itarget)
        x.getPlan()
        #x.storePlan()
        del x
def main():
    log(APPID + ' process starting')
    #Init the queue object
    x = libQueue.queue()
    #Init the rules
    try:
        rules = dbif.getRules()
        log('Rules successfully loaded (%s rules found)' % str(len(rules)))
    except:
        log('Failed to load the rules')
        traceback.print_exc(logFile)
        rules = []  #avoid a NameError below when the rules cannot be loaded
    for irule in rules:
        log("Applying rule %s: " % irule['id'])
        log(" condition: %s" % irule['condition'])
        log(" cliaction: %s" % irule['cliaction'])
        queuedItemsID = x.search(irule['condition'] + " and dwnstatus='C' and finstatus is null ")
        log(" found %s items" % len(queuedItemsID))
        for queuedItemID in queuedItemsID:
            #Create the queuedItem object starting from the ID
            try:
                queuedItem = libQueue.queuedItem(queuedItemID)
                queuedItem.getEmergencyDataset()
                log(" Found item %s; object successfully loaded" % queuedItemID)
            except:
                log(" Found item %s; error in initializing the object" % queuedItemID)
                traceback.print_exc(logFile)
                continue  #skip the item instead of acting on a stale object
            #Prepare the command line
            cli = irule['cliaction']
            cli = mapcliParameters(queuedItem, cli)
            #cli=cli.replace('$ITEM',queuedItemID)
            #cli=cli.replace('$ALLMETADATA',pprint.pformat(queuedItem.__dict__))
            #Invoke the command line
            log(" Invoking cli: %s" % cli)
            try:
                os.system(cli)
                #Flag the item as processed
                queuedItem.setFinStatus('OK')
                log(" Execution OK and item successfully tagged as OK")
            except:
                queuedItem.setFinStatus('NOK')
                log(" Execution FAILED and item tagged as NOK")
                traceback.print_exc(logFile)
    log(APPID + ' process completed')
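# mapcliParameters() is referenced by main() above but defined elsewhere. The
# sketch below is a guess at its role, based only on the commented-out
# replace() calls in main(): it expands the $-placeholders of a rule's
# cliaction using the queued item. The 'queuedItem.id' attribute is an
# assumption (the downloader main() below accesses y.id on queue items).
import pprint

def mapcliParameters(queuedItem, cli):
    cli = cli.replace('$ITEM', str(queuedItem.id))  #assumed attribute name
    cli = cli.replace('$ALLMETADATA', pprint.pformat(queuedItem.__dict__))
    return cli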
def storePlan(self):
    #store the plan into the DB
    print "Storing the plan into the DB"
    x = libQueue.queue()
    i = 0
    for plannedItem in self.plan:
        i += 1
        try:
            x.addItem(plannedItem)
        except:
            print "Failed to import product %s" % plannedItem.ID
            traceback.print_exc(file=sys.stdout)
    return
def addSingleODAProduct(productID, targetid):
    targets = dbif.getTargetList("id='%s'" % targetid)
    x = libQueue.queue()
    for itarget in targets:
        print "Processing %s" % itarget['id']
        oda = gmpPluginOda(itarget)
        if 'SAFE' not in productID:
            productID += '.SAFE'
        try:
            newProduct = oda.createItem(productID)
            x.addItem(newProduct)
            return
        except:
            print "Failed to get metalink from ODA for product %s" % productID
            traceback.print_exc(file=sys.stdout)
            return
    print "ERROR: No target %s found" % targetid
    return
def mainworkflow():
    #check DB connection
    q = libQueue.queue()
    del q
    targets = dbif.getTargetList("type='oda'")
    for itarget in targets:
        print "Processing %s" % itarget['id']
        x = gmpPluginOda(itarget)
        try:
            x.getPlan()
        except:
            print "ERROR: Failed to get plan from target %s" % itarget
            traceback.print_exc(file=sys.stdout)
        try:
            x.storePlan()
        except:
            print "ERROR: Failed to store plan from target %s" % itarget
            traceback.print_exc(file=sys.stdout)
        del x
def mainworkflow():
    #check DB connection
    q = libQueue.queue()
    del q
    targets = dbif.getTargetList("type='ftpz'")
    for itarget in targets:
        print "Processing %s" % itarget['id']
        x = gmpPluginFTPZ(itarget)
        try:
            x.getPlan()
        except:
            print "ERROR: Failed to get plan from target %s" % itarget
            traceback.print_exc(file=sys.stdout)
        try:
            x.storePlan()
        except:
            print "ERROR: Failed to store plan from target %s" % itarget
            traceback.print_exc(file=sys.stdout)
        del x
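# The mainworkflow() variants above differ only in the target type and the
# plugin class they instantiate. The contract they rely on is sketched below
# as an assumption; the real gmpPluginDhus/gmpPluginOda/gmpPluginFTPZ classes
# are defined elsewhere in the project.
class gmpPluginSkeleton(object):
    def __init__(self, target):
        self.target = target  #one row returned by dbif.getTargetList()
        self.plan = []        #filled by getPlan(), consumed by storePlan()

    def getPlan(self):
        #query the remote source and populate self.plan with planned items
        raise NotImplementedError

    def storePlan(self):
        #push each planned item into the queue, as in storePlan() shown earlier
        raise NotImplementedError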
def main():
    #Get the first available item to be downloaded
    x = libQueue.queue()
    #resetDownloadQueue for debug purposes
    #x.resetDownloadQueue()
    y = x.getItemForDownload(str(os.getpid()))
    if y == '#':
        #no record found
        return
    #pprint.pprint(y.__dict__)
    log("Downloading %s" % y.id)
    #Redirecting the log
    #logFileName=prjFolder+'/log/downloads/'+ y.id + '.log'
    #logFile=open(logFileName,'w')
    #Get credential
    connection = getCredential(y.targetid)[0]
    #import pprint
    #pprint.pprint(connection)
    #Invoke agents for files to be downloaded
    previousMonitor = dict()
    previousMonitor['failed'] = list()
    previousMonitor['ok'] = list()
    for ifile in y.files:
        if ifile['dwnstatus'] != libQueue.cDwnStatusQueued:
            continue
        newid = str(ifile['fileid'])
        currMonitor = monitorChilds(childs)
        #wait for a free resource
        while (currMonitor['nRun'] >= maxDwnFilesPerItem):
            log('MAIN: waiting for childs: ' + str(currMonitor['running']))
            time.sleep(sleepTimeBetweenFileDownload)
            currMonitor = monitorChilds(childs)
        #print the result of the last released resource
        #compare succeeded processes
        for status in ['ok', 'failed']:
            new = currMonitor[status]
            newset = set(new)
            diff = newset.difference(previousMonitor[status])
            for i in diff:
                log("MAIN: Completed " + status + " process " + str(i))
        previousMonitor = currMonitor
        if ifile['url'] is None or ifile['url'] == '':
            continue
        log('MAIN: Spawning new process ' + newid)
        logf = 'wget.log'
        targetFilename = connection['rep'] + os.path.sep + ifile['filename']
        targetFolder = os.path.split(targetFilename)[0]
        if not os.path.exists(targetFolder):
            os.makedirs(targetFolder)
            #logcmd.write('mkdir -p %s \n' % targetFolder)
        cmd = (y.agentcli.replace('$LOG', logf)
                         .replace('$FILENAME', targetFilename)
                         .replace('$URL', ifile['url'])
               + ' 2>> ' + logf + ' 1>> ' + logf)
        cmd = cmd.replace('$USER', connection['username'])
        cmd = cmd.replace('$PASS', connection['password'])
        cmd = cmd.replace('$MAXBANDWIDTH', maxBandwidth)
        cmd = cmd.replace('$', '%24')
        #temporary network patch
        if False:
            cmd = cmd.replace('s1-pac1dmz-oda-v-20.sentinel1.eo.esa.int:80', 'localhost:14002')
        print cmd
        log(cmd)
        #logcmd.write(cmd+'\n')
        if performDownload:
            newProc = subprocess.Popen(['/bin/sh', '-c', cmd])
            proc = dict()
            proc['proc'] = newProc
            proc['fileid'] = newid
            proc['filename'] = ifile['filename']
            childs.append(proc)
        #time.sleep(1)
    #Wait until all download subprocesses are completed
    noErrorFound = True
    for proc in childs:
        exitCode = proc['proc'].wait()
        if exitCode == 0:
            y.setFileStatus(proc['fileid'], libQueue.cDwnStatusCompleted)
        else:
            print ' failed download of %s: %s' % (proc['fileid'], proc['filename'])
            noErrorFound = False
    if noErrorFound:
        #Download completed successfully
        y.setDwnStatus(libQueue.cDwnStatusCompleted)
    y.unlock()
    return
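# The agent command line (y.agentcli) is read from the database and is not
# shown in this section. The string below is only an illustrative guess at a
# wget-based template using the placeholders that main() substitutes
# ($USER, $PASS, $MAXBANDWIDTH, $FILENAME, $LOG, $URL); the actual template
# configured for a target may differ.
exampleAgentCli = ("wget --no-check-certificate --user=$USER --password=$PASS "
                   "--limit-rate=$MAXBANDWIDTH -O $FILENAME -a $LOG '$URL'")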