def _submit(hostConfig, submitDict): """ Submit a protocol to a queue system. Return its job id. """ # Create forst the submission script to be launched # formatting using the template template = hostConfig.getSubmitTemplate() % submitDict #FIXME: CREATE THE PATH FIRST scripPath = submitDict['JOB_SCRIPT'] f = open(scripPath, 'w') #Ensure the path exists makeFilePath(scripPath) # Add some line ends because in some clusters it fails # to submit jobs if the submit script does not have end of line f.write(template+'\n\n') f.close() # This should format the command using a template like: # "qsub %(JOB_SCRIPT)s" command = hostConfig.getSubmitCommand() % submitDict gcmd = greenStr(command) print "** Submiting to queue: '%s'" % gcmd p = Popen(command, shell=True, stdout=PIPE) out = p.communicate()[0] # Try to parse the result of qsub, searching for a number (jobId) s = re.search('(\d+)', out) if s: return int(s.group(0)) else: print "** Couldn't parse %s ouput: %s" % (gcmd, redStr(out)) return UNKNOWN_JOBID
def _submit(hostConfig, submitDict, cwd=None, env=None):
    """ Submit a protocol to a queue system. Return its job id. """
    # Create first the submission script to be launched
    # formatting using the template
    template = hostConfig.getSubmitTemplate() % submitDict
    scriptPath = submitDict['JOB_SCRIPT']
    # Ensure the path exists before opening the script for writing
    makeFilePath(scriptPath)
    f = open(scriptPath, 'w')
    # Add some line ends because in some clusters it fails
    # to submit jobs if the submit script does not have end of line
    f.write(template + '\n\n')
    f.close()
    # This should format the command using a template like:
    # "qsub %(JOB_SCRIPT)s"
    command = hostConfig.getSubmitCommand() % submitDict
    gcmd = greenStr(command)
    print("** Submitting to queue: '%s'" % gcmd)
    p = Popen(command, shell=True, stdout=PIPE, cwd=cwd, env=env)
    out = p.communicate()[0]
    # Try to parse the result of qsub, searching for a number (jobId)
    # Review this: it seems to be specific to the Torque batch system
    s = re.search(r'(\d+)', str(out))
    if p.returncode == 0 and s:
        job = int(s.group(0))
        print("Launched job with id %s" % job)
        return job
    else:
        print("Couldn't submit to queue for reason: %s " % redStr(out.decode()))
        return UNKNOWN_JOBID
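# Every snippet in this collection calls makeFilePath() right before writing a
# file, to guarantee that the file's parent directory exists. A minimal sketch
# of such a helper is shown below, assuming it only needs to create missing
# parent directories; the name _makeFilePathSketch is illustrative and the real
# pwutils implementation may accept several paths and do more.
import os

def _makeFilePathSketch(*paths):
    """ Create the parent directories of the given file paths if missing. """
    for p in paths:
        dirName = os.path.dirname(p)
        if dirName and not os.path.exists(dirName):
            os.makedirs(dirName)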
def __init__(self, filePath=''):
    """ If filePath is empty string, the general logger is used. """
    self._filePath = filePath
    makeFilePath(self._filePath)
    self.config = getLogConfiguration()

    if self._filePath not in self.config['loggers']:
        self.config['handlers'][self._filePath] = {
            'level': 'NOTSET',
            'class': 'logging.handlers.RotatingFileHandler',
            'formatter': 'fileFormat',
            'filename': self._filePath,
            'maxBytes': 100000}
        self.config['loggers'][self._filePath] = {
            'handlers': [self._filePath],
            'level': 'NOTSET',
            'propagate': False}
        # Note: if we want to see in the console what we also have in
        # run.log, add 'consoleHandler' to the list of 'handlers'.
        logging.config.dictConfig(self.config)

    self._log = logging.getLogger(self._filePath)
def generateMicImage(self, input_file, output_file=None):
    if not output_file:
        output_file = os.path.splitext(input_file)[0] + '.png'
    img = ImageHandler().createImage()
    img.read(input_file)
    pimg = getPILImage(img)
    pwutils.makeFilePath(output_file)
    pimg.save(output_file, "PNG")
def _insertAllSteps(self):
    self.initialIds = self._insertInitialSteps()
    self.micDict = OrderedDict()
    pwutils.makeFilePath(self._getAllDone())
    micDict, self.streamClosed = self._loadInputList()
    pickMicIds = self._insertNewMicsSteps(micDict.values())
    self._insertFinalSteps(pickMicIds)
def _writeDoneList(self, micList):
    """ Write to a text file the items that have been done. """
    doneFile = self._getAllDone()

    if not os.path.exists(doneFile):
        pwutils.makeFilePath(doneFile)

    with open(doneFile, 'a') as f:
        for mic in micList:
            f.write('%d\n' % mic.getObjId())
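# _writeDoneList appends one object id per line to the "done" file. A hedged
# sketch of a counterpart that reads those ids back is shown below; the name
# _readDoneListSketch is hypothetical and not part of the real protocol API.
import os

def _readDoneListSketch(doneFile):
    """ Return the integer ids stored one per line in doneFile, if any. """
    if not os.path.exists(doneFile):
        return []
    with open(doneFile) as f:
        return [int(line) for line in f if line.strip()]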
def notifyWorkflow(self):
    try:
        # Check if the environment variable is set, otherwise abort
        if not pwutils.envVarOn('SCIPION_NOTIFY'):
            return

        # Check the seconds range of the notify, by default one day
        seconds = int(os.environ.get('SCIPION_NOTIFY_SECONDS', '86400'))

        if self._modifiedBefore(seconds):  # notify not more than once a day
            return

        # INFO: now we are only sending the protocols names in the project.
        # We could pass namesOnly=False to get the full workflow template
        projectWorkflow = self.project.getProtocolsJson(namesOnly=True)

        # If the workflow has not been altered, do not send it
        if not self._dataModified(projectWorkflow):
            return
        else:
            # For compatibility with version 1.0 check
            # if the Log directory exists. If it does not, create it
            # TODO: REMOVE this check in scipion 1.3
            dataFile = self._getDataFileName()
            # Create the folder of the file path if it does not exist
            pwutils.makeFilePath(dataFile)
            with open(dataFile, 'w') as f:
                f.write(projectWorkflow)

        dataDict = {'project_uuid': self._getUuid(),
                    'project_workflow': projectWorkflow}
        urlName = os.environ.get('SCIPION_NOTIFY_URL',
                                 config.SCIPION_STATS_WORKFLOW_APP).strip()
        urlName += "addOrUpdateWorkflow/"
        t = threading.Thread(name="notifier",
                             target=lambda: self._sendData(urlName, dataDict))
        t.start()  # will execute function in a separate thread
    except Exception as e:
        print "Can't report usage: ", e
def _insertAllSteps(self):
    # Let's load input data for the already existing micrographs
    # before the streaming
    self.debug(">>> _insertAllSteps ")
    pwutils.makeFilePath(self._getAllDone())
    self.micDict = OrderedDict()
    self.coordDict = {}

    micDict = self._loadInputList()

    self.initialIds = self._insertInitialSteps()
    pickMicIds = self._insertNewMicsSteps(micDict.values())
    self._insertFinalSteps(pickMicIds)
def notifyWorkflow(self):
    try:
        # Check if the environment variable is set, otherwise abort
        if not pwutils.envVarOn('SCIPION_NOTIFY'):
            return

        # Check the seconds range of the notify, by default one day
        seconds = int(os.environ.get('SCIPION_NOTIFY_SECONDS', '86400'))

        if self._modifiedBefore(seconds):  # notify not more than once a day
            return

        # INFO: now we are only sending the protocols names in the project.
        # We could pass namesOnly=False to get the full workflow template
        projectWorkflow = self.project.getProtocolsJson(namesOnly=True)

        # If the workflow has not been altered, do not send it
        if not self._dataModified(projectWorkflow):
            return
        else:
            # For compatibility with version 1.0 check
            # if the Log directory exists. If it does not, create it
            # TODO: REMOVE this check in scipion 1.3
            dataFile = self._getDataFileName()
            # Create the folder of the file path if it does not exist
            pwutils.makeFilePath(dataFile)
            with open(dataFile, 'w') as f:
                f.write(projectWorkflow)

        dataDict = {'project_uuid': self._getUuid(),
                    'project_workflow': projectWorkflow}
        urlName = os.environ.get('SCIPION_NOTIFY_URL',
                                 config.SCIPION_STATS_WORKFLOW_APP).strip()
        urlName += "addOrUpdateWorkflow/"
        t = threading.Thread(target=lambda: self._sendData(urlName, dataDict))
        t.start()  # will execute function in a separate thread
    except Exception as e:
        print "Can't report usage: ", e
def generate_image(self, input_file, outputName=None):
    output_root = join(self.images_path, basename(outputName))
    output_file = output_root + '.jpg'
    print "Generating image: ", output_file

    if not exists(output_file):
        from PIL import Image
        self.img.read(join(self.project_path, input_file))
        pimg = getPILImage(self.img)
        pwutils.makeFilePath(output_file)
        if self.bigThumb:
            pimg.save(output_file, "JPEG")
        if self.smallThumb:
            pimg.thumbnail((self.smallThumb, self.smallThumb), Image.ANTIALIAS)
            pimg.save(output_root + 't.jpg', "JPEG")

    return output_file
def generate_image(self, input_file, outputName=None):
    output_root = join(self.images_path, basename(outputName))
    output_file = output_root + '.png'
    print "Generating image: ", output_file

    if not exists(output_file):
        from PIL import Image
        self.img.read(join(self.project_path, input_file))
        pimg = getPILImage(self.img)
        pwutils.makeFilePath(output_file)
        if self.bigThumb:
            pimg.save(output_file, "PNG")
        if self.smallThumb:
            pimg.thumbnail((self.smallThumb, self.smallThumb), Image.ANTIALIAS)
            pimg.save(output_root + 't.png', "PNG")

    return output_file
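# Note: both thumbnail helpers above use Image.ANTIALIAS, which newer Pillow
# releases renamed (it is an alias of the LANCZOS filter; the old name was
# eventually removed). If these snippets are run against a modern Pillow, a
# small compatibility shim like the sketch below avoids the AttributeError;
# the constant name RESAMPLE_FILTER is illustrative.
from PIL import Image

try:
    RESAMPLE_FILTER = Image.Resampling.LANCZOS  # Pillow >= 9.1
except AttributeError:
    RESAMPLE_FILTER = Image.ANTIALIAS           # older Pillow

# usage: pimg.thumbnail((size, size), RESAMPLE_FILTER)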
def _submit(hostConfig, submitDict): """ Submit a protocol to a queue system. Return its job id. """ # Create forst the submission script to be launched # formatting using the template template = hostConfig.getSubmitTemplate() % submitDict #FIXME: CREATE THE PATH FIRST scripPath = submitDict['JOB_SCRIPT'] f = open(scripPath, 'w') #Ensure the path exists makeFilePath(scripPath) # Add some line ends because in some clusters it fails # to submit jobs if the submit script does not have end of line f.write(template + '\n\n') f.close() # This should format the command using a template like: # "qsub %(JOB_SCRIPT)s" command = hostConfig.getSubmitCommand() % submitDict gcmd = greenStr(command) print "** Submiting to queue: '%s'" % gcmd # zf = open('/home/jtq89441/Desktop/scipion.log','w+') # zf.write('It works!%s'%submitDict) # zf.close() # ---------------------------------- DLS_SCIPION = '/dls_sw/apps/scipion/release-1.2.1-zo' # command_for_recipe = 'module load %s &&'%DLS_SCIPION +'; '+ command projpath = submitDict['JOB_COMMAND'].split()[4] command_for_queue = '%s %s' % (command.split()[0], '/'.join( [projpath, command.split()[1]])) print 'command_for_queue: %s' % command_for_queue zocolo_cmd = 'module load dials; dials.python /dls_sw/apps/scipion/scipion_1_2_1_dials/scipion/pyworkflow/protocol/generic_template.py %s' % command_for_queue print zocolo_cmd print '****Before Zocolo****' msg_p = Popen(zocolo_cmd, shell=True) print '****After Zocolo****' # ------------------------------------ #Generating the recipe for ActiveMQ # default_configuration = '/dls_sw/apps/zocalo/secrets/credentials-live.cfg' # # override default stomp host # try: # StompTransport.load_configuration_file(default_configuration) # except workflows.Error as e: # print "Error: %s\n" % str(e) # # # StompTransport.add_command_line_options(parser) # # (options, args) = parser.parse_args(sys.argv[1:]) # stomp = StompTransport() # # message = {'recipes': [], # 'parameters': {}, # } # # Build a custom recipe # command_for_recipe = 'module load scipion/release-1.2.1-headless &&' + command # # recipe = {} # recipe['1'] = {} # recipe['1']['service'] = "motioncor2_runner" # recipe['1']['queue'] = "motioncor2_runner" # recipe['1']['parameters'] = {} # recipe['1']['parameters']['arguments'] = command_for_recipe # recipe['start'] = [[1, []]] # # message['custom_recipe'] = recipe # print "******************************** THIS IS THE SUBMITTED RECIPE**********************************************" # # stomp.connect() # test_valid_recipe = workflows.recipe.Recipe(recipe) # test_valid_recipe.validate() # print message # # stomp.send('processing_recipe',message) # print("\nMotioncor2 job submitted") ## end of recipe generation # Npn zocalo scipion send command p = Popen(command, shell=True, stdout=PIPE) out = p.communicate()[0] # Try to parse the result of qsub, searching for a number (jobId) s = re.search('(\d+)', out) if s: return int(s.group(0)) else: print "** Couldn't parse %s ouput: %s" % (gcmd, redStr(out)) return UNKNOWN_JOBID