def __init__(self, recipe=None, recipeURI=None, output_dir=None):
    from PYME.recipes import Recipe
    if recipe:
        if isinstance(recipe, string_types):
            self.recipe_text = recipe
            self.recipe = Recipe.fromYAML(recipe)
        else:
            self.recipe_text = recipe.toYAML()
            self.recipe = recipe

        self.recipeURI = None
    else:
        self.recipe = None
        if recipeURI is None:
            raise ValueError('recipeURI must be defined if no recipe given')
        else:
            from PYME.IO import unifiedIO
            self.recipeURI = recipeURI
            self.recipe = Recipe.fromYAML(unifiedIO.read(recipeURI))

    self.output_dir = output_dir

    self.taskQueueURI = _getTaskQueueURI()

    # generate a queue ID as a hash of the recipe and the current time
    to_hash = self.recipeURI if self.recipeURI else self.recipe_text
    try:  # hashlib requires bytes on py3
        to_hash = to_hash.encode()
    except TypeError:  # encoding without a string argument, i.e. already bytes
        pass

    h = hashlib.md5(to_hash)
    h.update(str(time.time()).encode())
    self.queueID = h.hexdigest()  # hexdigest returns str
def __init__(self, recipe=None, recipeURI=None):
    from PYME.recipes import Recipe
    if recipe:
        if isinstance(recipe, string_types):
            self.recipe_text = recipe
            self.recipe = Recipe.fromYAML(recipe)
        else:
            self.recipe_text = recipe.toYAML()
            self.recipe = recipe

        self.recipeURI = None
    else:
        self.recipe = None
        if recipeURI is None:
            raise ValueError('recipeURI must be defined if no recipe given')
        else:
            from PYME.IO import unifiedIO
            self.recipeURI = recipeURI
            self.recipe = Recipe.fromYAML(unifiedIO.read(recipeURI))

    self.taskQueueURI = _getTaskQueueURI()

    # generate a queue ID as a hash of the recipe and the current time
    to_hash = self.recipeURI if self.recipeURI else self.recipe_text
    h = hashlib.md5(to_hash.encode())  # hashlib requires bytes on py3
    h.update(str(time.time()).encode())
    self.queueID = h.hexdigest()
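# A minimal, standalone sketch of the queue-ID generation used in the two
# constructors above: an MD5 of the recipe text (or URI), salted with the current
# time so repeated submissions of the same recipe get distinct queues. The
# recipe_text value here is a placeholder, not taken from the original source.
import hashlib
import time

recipe_text = 'recipe text or URI goes here'
h = hashlib.md5(recipe_text.encode())   # hashlib requires bytes on Python 3
h.update(str(time.time()).encode())
queueID = h.hexdigest()                 # hex string identifying the queue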
def main():
    # start by finding out what recipe we're using - different recipes can have different options
    ap = ArgumentParser(usage='usage: %(prog)s [options] recipe.yaml')
    ap.add_argument('recipe')
    args, remainder = ap.parse_known_args()

    # load the recipe
    with open(args.recipe) as f:
        s = f.read()

    recipe = Recipe.fromYAML(s)

    # create a new parser to parse input and output filenames
    op = ArgumentParser()
    for ip in recipe.inputs:
        op.add_argument('--%s' % ip)

    for ot in recipe.outputs:
        op.add_argument('--%s' % ot)

    args = op.parse_args(remainder)

    inputs = {k: getattr(args, k) for k in recipe.inputs}
    outputs = {k: getattr(args, k) for k in recipe.outputs}

    ## Run the recipe
    runRecipe(recipe, inputs, outputs)  # TODO - fix for contexts
def run_template(request):
    from PYME import config
    from PYME.IO import unifiedIO
    from PYME.recipes import Recipe
    from PYME.recipes import modules
    from PYME.cluster.rules import RecipeRule

    recipeURI = 'pyme-cluster://%s/%s' % (server_filter,
                                          request.POST.get('recipeURL').lstrip('/'))
    output_directory = 'pyme-cluster://%s/%s' % (server_filter,
                                                 request.POST.get('recipeOutputPath').lstrip('/'))

    recipe_text = unifiedIO.safe_read(recipeURI).decode('utf-8')
    recipe = Recipe.fromYAML(recipe_text)

    # handle templated userfile inputs - these will be loaded by e.g. unifiedIO later
    for file_input in recipe.file_inputs:
        input_url = 'pyme-cluster://%s/%s' % (server_filter,
                                              request.POST.get('%sURL' % file_input).lstrip('/'))
        recipe_text = recipe_text.replace('{' + file_input + '}', input_url)

    rule = RecipeRule(recipe=recipe_text, output_dir=output_directory,
                      inputs={'input': request.POST.getlist('files', [])})
    rule.push()

    return HttpResponseRedirect('/status/queues/')
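# A minimal sketch (outside of Django) of the templated-input substitution and rule
# submission shown above. The server filter, recipe path, user-file URL, and input
# series name are placeholders, not values from the original source.
from PYME.IO import unifiedIO
from PYME.recipes import Recipe
from PYME.cluster.rules import RecipeRule

server_filter = ''   # placeholder cluster name filter
recipe_text = unifiedIO.safe_read('pyme-cluster://%s/RECIPES/my_recipe.yaml' % server_filter).decode('utf-8')
recipe = Recipe.fromYAML(recipe_text)

# each {file_input} placeholder in the YAML is replaced with a concrete URL
for file_input in recipe.file_inputs:
    recipe_text = recipe_text.replace('{' + file_input + '}',
                                      'pyme-cluster://%s/USERFILES/%s' % (server_filter, file_input))

rule = RecipeRule(recipe=recipe_text,
                  output_dir='pyme-cluster://%s/RECIPE_OUTPUT' % server_filter,
                  inputs={'input': ['series_0.h5']})   # assumed input series name(s)
rule.push()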
def main():
    # Start the tornado ioloop application
    ioloop = IOLoop.instance()

    if len(sys.argv) > 1:
        filename = sys.argv[1]
        with open(filename, 'r') as f:
            recipe_yaml = f.read()
    else:
        recipe_yaml = ''

    recipe = Recipe.fromYAML(recipe_yaml)
    print(recipe)

    # Instantiate the domain model
    fred = Person(name='Fred', age=42)

    # Create a web app serving the view with the domain model added to its
    # context.
    app = WebApp(template=template, context={'recipe': recipe},
                 handlers=[(r'/static/(.*)', tornado.web.StaticFileHandler,
                            {'path': PYME.resources.get_web_static_dir()}), ])
    app.listen(8000)

    # Start serving the web app on port 8000.
    #
    # Point your web browser to http://localhost:8000/ to connect to this jigna
    # web app. Any operation performed on the client directly updates the
    # model attributes on the server.
    print('Serving on port 8000...')
    ioloop.start()
def main():
    # set matplotlib backend for offline figure generation
    # TODO - move this further down (i.e. to the figure generation code itself)?
    import matplotlib
    matplotlib.use('Cairo', warn=False)

    # start by finding out what recipe we're using - different recipes can have different options
    ap = ArgumentParser()  #usage = 'usage: %(prog)s [options] recipe.yaml')
    ap.add_argument('recipe')
    ap.add_argument('output_dir')
    ap.add_argument('-n', '--num-processes', default=NUM_PROCS)
    args, remainder = ap.parse_known_args()

    # load the recipe
    with open(args.recipe) as f:
        s = f.read()

    recipe = Recipe.fromYAML(s)

    output_dir = args.output_dir
    num_procs = args.num_processes

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # create a new parser to parse input and output filenames
    op = ArgumentParser()
    for ip in recipe.inputs:
        op.add_argument('--%s' % ip)

    args = op.parse_args(remainder)

    inputGlobs = {k: glob.glob(getattr(args, k)) for k in recipe.inputs}

    bake(recipe, inputGlobs, output_dir, num_procs)
def test_recipe_1():
    rec = Recipe.fromYAML(recipe_1)

    im = ImageStack(filename=os.path.join(resources.get_test_data_dir(), 't_im.tif'))

    rec.execute(input=im)

    assert np.allclose(rec.namespace['zoomed'].data_xyztc.shape, (88, 80, 241, 1, 2))
def bake_recipe(recipe_filename, inputGlobs, output_dir, *args, **kwargs):
    with open(recipe_filename) as f:
        s = f.read()

    recipe = Recipe.fromYAML(s)

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    bake(recipe, inputGlobs, output_dir, *args, **kwargs)
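# A minimal, hypothetical usage sketch of bake_recipe. The recipe filename, glob
# pattern, and output directory are placeholders; the input key ('input') must
# match an input name declared in the recipe.
import glob

inputGlobs = {'input': glob.glob('data/*.tif')}   # one list of files per recipe input
bake_recipe('my_recipe.yaml', inputGlobs, 'output/', 4)   # trailing arg forwarded to bake() (number of processes in the CLI above)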
def LoadRecipeText(self, s, filename=''):
    self.currentFilename = filename
    self.activeRecipe = Recipe.fromYAML(s)
    #self.mICurrent.SetItemLabel('Run %s\tF5' % os.path.split(filename)[1])

    try:
        self.activeRecipe.recipe_changed.connect(self.recipeView.update)
        self.activeRecipe.recipe_executed.connect(self.recipeView.update)
        self.activeRecipe.recipe_failed.connect(self.recipeView.update)

        self.recipeView.update()
    except AttributeError:
        pass
def extra_inputs(request):
    from PYME.IO import unifiedIO
    from PYME.recipes import Recipe

    recipeURI = ('pyme-cluster://%s/' % server_filter) + request.GET.get('recipeURL').lstrip('/')

    recipe = Recipe.fromYAML(unifiedIO.safe_read(recipeURI))

    return render(request, 'recipes/extra_inputs.html',
                  {'file_inputs': recipe.file_inputs, 'serverfilter': server_filter})
def view_svg(request):
    from PYME.IO import unifiedIO
    from PYME.recipes import Recipe
    from PYME.recipes import modules
    from PYME.recipes import recipeLayout

    recipeURI = ('pyme-cluster://%s/' % server_filter) + request.GET.get('recipeURL').lstrip('/')

    recipe = Recipe.fromYAML(unifiedIO.safe_read(recipeURI))

    svg = recipeLayout.to_svg(recipe.dependancyGraph())

    return HttpResponse(svg, content_type='image/svg+xml')
def runRecipe(recipe, inputs, outputs, context={}, metadata_defaults={}):
    """Load inputs and run recipe, saving outputs.

    Parameters
    ----------
    recipe : an instance of PYME.recipes.Recipe (or a YAML string defining one)

    inputs : a dictionary mapping recipe input names to filenames. These are
        loaded and inserted into the namespace before running the recipe.

    outputs : a dictionary mapping recipe output names to filenames. The
        corresponding members of the namespace are saved to disk following
        execution of the recipe.

    context : a dictionary used for filename substitutions

    metadata_defaults : a dictionary (or metadata handler) specifying metadata
        entries to use if input files have incomplete metadata
    """
    try:
        if not isinstance(recipe, Recipe):
            # recipe is a string
            recipe = Recipe.fromYAML(recipe)

        # the recipe instance might be re-used - clear any previous data
        recipe.namespace.clear()

        # load any necessary inputs and populate the recipe's namespace
        for key, filename in inputs.items():
            recipe.loadInput(filename, key, metadata_defaults)

        ### Run the recipe ###
        res = recipe.execute()

        # save any outputs [old-style, detected using the 'out' prefix]
        for k, v in outputs.items():
            saveOutput(recipe.namespace[k], v)

        # new style output saving - using OutputModules
        recipe.save(context)

    except:
        logger.exception('Error running recipe')
        raise
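# A minimal, hypothetical usage sketch of runRecipe. The recipe filename and the
# input/output names are placeholders; real names come from the recipe YAML itself.
with open('my_recipe.yaml') as f:   # hypothetical recipe file
    recipe_yaml = f.read()

runRecipe(recipe_yaml,                                # a YAML string is converted with Recipe.fromYAML
          inputs={'input': 'data/series_0.tif'},      # recipe input name -> filename to load
          outputs={'output': 'results/output.tif'},   # recipe output name -> filename to save
          context={'output_dir': 'results'})          # used for filename substitutions by recipe.save()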
def main():
    os.environ['DJANGO_SETTINGS_MODULE'] = 'clusterUI.settings'  # path to your settings module
    application = get_wsgi_application()

    django_app = tornado.wsgi.WSGIContainer(application)
    tornado_app = JignaWebApp(
        handlers=[
            (r'/static/(.*)', tornado.web.StaticFileHandler,
             {'path': PYME.resources.get_web_static_dir()}),
            #(r'/media/(.*)', tornado.web.StaticFileHandler, {'path': MEDIA_URL}),
            #(r'/recipe_editor/(.*)', tornado.web.StaticFileHandler, {'path': os.path.dirname(html_recipe_editor.__file__)}),
            (r'.*', tornado.web.FallbackHandler, dict(fallback=django_app)),
        ],
        template=html_recipe_editor.template,
        context={'recipe': Recipe.fromYAML(rec_text)})

    #server = tornado.httpserver.HTTPServer(tornado_app)
    http_server = tornado.httpserver.HTTPServer(tornado_app)
    http_server.listen(8889)
    tornado.ioloop.IOLoop.instance().start()
def computeLoop(self):
    while self._loop_alive:
        # loop over tasks - we pop each task and then delete it after processing
        # to keep memory usage down
        queueURL, taskDescr = self.inputQueue.get()
        if taskDescr['type'] == 'localization':
            try:
                task = remFitBuf.createFitTaskFromTaskDef(taskDescr)
                res = task()

                self.resultsQueue.put((queueURL, taskDescr, res))
            except:
                import traceback
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.exception(tb)
                self.resultsQueue.put((queueURL, taskDescr, TaskError(taskDescr, tb)))
                #self.resultsQueue.put((queueURL, taskDescr, None))

        elif taskDescr['type'] == 'recipe':
            from PYME.recipes import Recipe
            from PYME.recipes import modules

            try:
                taskdefRef = taskDescr.get('taskdefRef', None)
                if taskdefRef:  # recipe is defined in a file - go find it
                    recipe_yaml = unifiedIO.read(taskdefRef)
                else:  # recipe is defined in the task
                    recipe_yaml = taskDescr['taskdef']['recipe']

                recipe = Recipe.fromYAML(recipe_yaml)

                # load recipe inputs
                logging.debug(taskDescr)
                for key, url in taskDescr['inputs'].items():
                    logging.debug('RECIPE: loading %s as %s' % (url, key))
                    recipe.loadInput(url, key)

                #print recipe.namespace
                recipe.execute()

                # save results
                context = {'data_root': clusterIO.local_dataroot,
                           'task_id': taskDescr['id'].split('~')[0]}

                # update context with file stub and input directory
                try:
                    principle_input = taskDescr['inputs']['input']  # default input
                    context['file_stub'] = os.path.splitext(os.path.basename(principle_input))[0]
                    context['input_dir'] = unifiedIO.dirname(principle_input)
                except KeyError:
                    pass

                try:
                    od = taskDescr['output_dir']
                    # make sure we have a trailing slash
                    # TODO - this should be fine for most windows use cases, as you should generally
                    # use POSIX urls for the cluster/cluster of one, but might need checking
                    if not od.endswith('/'):
                        od = od + '/'

                    context['output_dir'] = unifiedIO.dirname(od)
                except KeyError:
                    pass

                #print taskDescr['inputs']
                #print context

                # abuse outputs as context
                outputs = taskDescr.get('outputs', None)
                if not outputs is None:
                    context.update(outputs)

                #print context, context['input_dir']
                recipe.save(context)

                self.resultsQueue.put((queueURL, taskDescr, True))
            except Exception:
                import traceback
                traceback.print_exc()
                tb = traceback.format_exc()
                logger.exception(tb)
                self.resultsQueue.put((queueURL, taskDescr, TaskError(taskDescr, tb)))
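# A minimal, hypothetical sketch of a 'recipe' task descriptor as consumed by the
# worker loop above. Only the keys ('type', 'id', 'taskdef'/'taskdefRef', 'inputs',
# 'output_dir', 'outputs') are taken from the code; every value is a placeholder.
with open('my_recipe.yaml') as f:   # hypothetical recipe file
    recipe_yaml = f.read()

taskDescr = {
    'type': 'recipe',
    'id': 'example-queue~0001',                        # '~' separates the queue ID from the task index
    'taskdef': {'recipe': recipe_yaml},                # inline YAML; use 'taskdefRef' to point at a file instead
    'inputs': {'input': 'pyme-cluster:///DATA/series_0.h5'},
    'output_dir': 'pyme-cluster:///RECIPE_OUTPUT/',
    'outputs': {},                                     # optional extra entries merged into the output context
}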