def main(self, image_url, mex_url = None, bisque_token=None, bq = None, args = None): # Allow for testing by passing an alreay initialized session if bq is None: bq = BQSession().init_mex(mex_url, bisque_token) # Fetch the blob links if not os.path.exists ('videos'): os.makedirs ('videos') video = fetch_blob(bq, image_url, 'videos') print "VIDEO file ", video #pass arguments to MotionMeerkat scripts, located in MotionMeerkat/main.py #Structure arguments #Format call string callargs = ["python MotionMeerkat/main.py", "--i", video.values()[0], "--threshT", args[0], "--sub", args[1], "--mogh", args[2], "--mogv", args[3], "--accA", args[4], "--burn", args[5], "--frameSET", "--frame_rate", "1", "--makeV", "none", "--fileD", "Output"] print "Calling ", " ".join(callargs) #run MotionMeerkat r = call(" ".join(callargs), shell=True) if r != 0: bq.fail_mex ("Meerkat returned non-zero") #Post Results #get filename to get the destination of the csv to be posted fn=video.values()[0] fbs= os.path.basename(fn) head, tail = os.path.splitext(fbs) #post file frames_blob = bq.postblob(str("Output/" + head + "/Frames.csv")) #get file location from regex uri=re.search("uri=\"(.*?)\"", frames_blob).group(1) tags = [{ 'name': 'outputs','tag' : [{'name': 'frames_csv', 'type':'file', 'value':uri}]}] bq.finish_mex(tags = tags) sys.exit(0)
outputs = etree.Element('tag', name='outputs') summary = etree.SubElement(outputs, 'tag', name='summary') for r,v in resources.iteritems(): etree.SubElement(summary, 'tag', name=r, value=v) bq.finish_mex(tags=[outputs]) if __name__ == "__main__": import optparse parser = optparse.OptionParser() parser.add_option("-c", "--credentials", dest="credentials", help="credentials are in the form user:password") (options, args) = parser.parse_args() M = AnnotationHistograms() if options.credentials is None: mex_url, auth_token = args[:2] bq = BQSession().init_mex(mex_url, auth_token) else: mex_url = '' if not options.credentials: parser.error('need credentials') user,pwd = options.credentials.split(':') bq = BQSession().init_local(user, pwd) try: M.main(mex_url=mex_url, bq=bq ) except Exception, e: bq.fail_mex(traceback.format_exc()) sys.exit(0)
class RootTip(object): def setup(self): #if not os.path.exists(self.images): # os.makedirs(self.images) self.bq.update_mex('initializing') results = fetch_image_planes(self.bq, self.resource_url, '.') # extract gobject inputs tips = self.bq.mex.find('inputs', 'tag').find('image_url', 'tag').find('tips', 'gobject') with open('inputtips.csv', 'w') as TIPS: for point in tips.gobjects: print >> TIPS, "%(y)s, %(x)s" % dict(x=point.vertices[0].x, y=point.vertices[0].y) def start(self): self.bq.update_mex('executing') # Matlab requires trailing slash subprocess.call([EXEC, './']) def teardown(self): # Post all submex for files and return xml list of results gobjects = self._read_results() tags = [{ 'name': 'outputs', 'tag': [{ 'name': 'rootimage', 'type': 'image', 'value': self.resource_url, 'gobject': [{ 'name': 'root_tips', 'type': 'root_tips', 'gobject': gobjects }] }] }] self.bq.finish_mex(tags=tags) def _read_results(self, ): results = [] #image = self.bq.load(self.resource_url, view='full') #xmax, ymax, zmax, tmax, ch = image.geometry() # each line is a time point, tracked points in line tips = [t for t in csv.reader(open('tips.csv', 'rb'))] # each line is a time point, tracked points per line pt1, pt2, pt2 angles = [a for a in csv.reader(open('angles.csv', 'rb'))] # We want to generate lines in time to keep the tracked point in a single object # <gobject name="Tip 1" > # <gobject type=tipangle> # <point x= y= t=1..N > # <tag name="angle" value="" /> # <gobject> # ... 
# </gobject> # <gobject name="Tip 2" > # planes = len(angles) tip_count = planes and len(angles[0]) or 0 tracks = [] for pt in range(tip_count): gobs = [] for t_plane in range(planes): gobs.append({ 'type': 'tipangle', 'tag': [{ 'name': 'angle', 'value': angles[t_plane][pt] }], 'point': [{ 'vertex': [{ 't': str(t_plane), 'x': str(tips[t_plane][pt * 2]), 'y': str(tips[t_plane][pt * 2 + 1]) }] }] }) tracks.append({ 'name': 'tip-%s' % pt, 'type': 'roottip', 'gobject': gobs }) return tracks def run(self): parser = optparse.OptionParser() parser.add_option('-d', '--debug', action="store_true") parser.add_option('-n', '--dryrun', action="store_true") parser.add_option('--credentials') parser.add_option('--image_url') (options, args) = parser.parse_args() named = AttrDict(bisque_token=None, mex_url=None, staging_path=None, image_url=None) for arg in list(args): tag, sep, val = arg.partition('=') if sep == '=': named[tag] = val args.remove(arg) self.named_args = named self.staging_path = named.get('staging_path') if named.bisque_token: self.bq = BQSession().init_mex(named.mex_url, named.bisque_token) self.resource_url = self.bq.parameter_value('image_url') elif options.credentials: user, pwd = options.credentials.split(':') self.bq = BQSession().init_local(user, pwd) self.resource_url = named.image_url else: parser.error('need bisque_token or user credential') if self.resource_url is None: parser.error('Need a resource_url') if not args: commands = ['setup', 'start', 'teardown'] else: commands = [args] try: for command in commands: command = getattr(self, command) r = command() except Exception, e: logging.exception("problem during %s" % command) self.bq.fail_mex(msg="Exception during %s: %s" % (command, e)) sys.exit(1) sys.exit(0)
class CondorRunner(CommandRunner):
    """A Runtime to execute a module on a condor enabled system
    """
    name = "condor"
    transfers = []     # Condor transfers (see condor docs)
    requirements = ""  # Condor "&& (Memory > 3000) && IsWholeMachineSlot"
    dag_template = ""
    submit_template = ""

    def __init__(self, **kw):
        super(CondorRunner, self).__init__(**kw)

    def read_config(self, **kw):
        """Load condor config sections from site and module config files.

        Module settings are loaded after site settings so they take
        precedence (presumably -- depends on load_section semantics; confirm).
        """
        super(CondorRunner, self).read_config(**kw)
        self.debug("CondorRunner: read_config")
        self.load_section('condor', self.bisque_cfg)
        self.load_section('condor', self.module_cfg)
        self.load_section('condor_submit', self.bisque_cfg)
        self.load_section('condor_submit', self.module_cfg)

    def process_config(self, **kw):
        """Normalize per-mex file lists and add the runtime config files."""
        super(CondorRunner, self).process_config(**kw)
        self.info("process_config condor")
        # any listed file will be a transfer
        for mex in self.mexes:
            mex.files = strtolist(mex.get('files', []))
            mex.output_files = strtolist(mex.get('output_files', []))
            mex.files.append('runtime-module.cfg')
            mex.files.append(os.path.abspath(config_path('runtime-bisque.cfg')))

    def setup_environments(self, **kw):
        """Copy each mex's file lists into condor transfer-in/out lists."""
        super(CondorRunner, self).setup_environments(**kw)
        self.info("setup_environments condor")
        for mex in self.mexes:
            mex.transfers = list(mex.files)
            mex.transfers_out = list(mex.output_files)

    def command_start(self, **kw):
        """Build launcher stubs and the condor submit/DAG files, then execute.

        Returns self.command_execute so the engine immediately proceeds to
        submission.
        """
        super(CondorRunner, self).command_start(**kw)
        self.helper = CondorTemplates(self.sections['condor'])
        # Condor requires a real Launcher (executable) in order
        # to post process.  If it does not exist we create a small stub
        topmex = self.mexes[0]
        executable = topmex.executable
        if len(self.mexes) > 1:
            # multimex: the real executable lives on the sub-mexes
            executable = self.mexes[1].executable
        # Arguments for the post-script: engine flags, mex identity, then
        # 'finish' so the launcher runs the finish phase after the DAG.
        postargs = []
        if self.options.verbose:
            postargs.append('-v')
        if self.options.debug:
            postargs.append('-d')
        if self.options.dryrun:
            postargs.append('-n')
        postargs.append('mex_id=%s' % topmex.mex_id)
        postargs.append('staging_path=%s' % topmex.staging_path)
        postargs.extend([
            '%s=%s' % (k, v) for k, v in topmex.named_args.items()
            if k != 'mex_id' and k != 'staging_path'
        ])
        postargs.append('mex_url=%s' % topmex.mex_url)
        postargs.append('bisque_token=%s' % topmex.bisque_token)
        # $RETURN is substituted by condor with the DAG's exit code
        postargs.append('condor_job_return=$RETURN')
        #postargs.append('$RETURN')
        postargs.append('finish')
        for mex in self.mexes:
            # Template variables: mex fields, named args, then condor sections
            mex_vars = dict(mex)
            mex_vars.update(mex.named_args)
            mex_vars.update(self.sections['condor'])
            mex_vars.update(self.sections['condor_submit'])
            if mex.get('launcher') is None:
                mex.launcher = self.helper.construct_launcher(mex_vars)
            check_exec(mex.launcher)  # Ensure this launcher is executable
            mex.launcher = os.path.basename(mex.launcher)
        self.debug("Creating submit file")
        top_vars = dict(topmex)
        top_vars.update(topmex.named_args)
        top_vars.update(self.sections['condor'])
        top_vars.update(self.sections['condor_submit'])
        top_vars.update(
            executable=executable[0],
            #arguments = ' '.join (self.executable[1:]),
            #transfers = ",".join(self.transfers),
            mexes=self.mexes,
            post_exec=topmex.launcher,
            post_args=" ".join(postargs),
            condor_submit="\n".join([
                "%s=%s" % (k, v)
                for k, v in self.sections['condor_submit'].items()
            ]))
        self.helper.prepare_submit(top_vars)
        # Immediately go to execute
        return self.command_execute

    def command_execute(self, **kw):
        """Submit the prepared DAG with condor_submit_dag.

        Returns None: condor's post-script drives the finish phase, so the
        engine state machine stops here.
        """
        #self.info ("condor_execute: On %s", platform.node())
        cmd = ['condor_submit_dag', self.helper.dag_path]
        process = dict(command_line=cmd, mex=self.mexes[0])
        self.info("SUBMIT %s in %s", cmd, self.mexes[0].get('staging_path'))
        if not self.options.dryrun:
            submit = subprocess.Popen(cmd,
                                      cwd=self.mexes[0].get('staging_path'),
                                      stdout=subprocess.PIPE)
            out, err = submit.communicate()
            if submit.returncode != 0:
                self.command_failed(process, submit.returncode)
        # # get ID of dag runner cluster and store in runner_ids
        # runner_id = None
        # for line in out.split('\n'):
        #     toks = line.split('job(s) submitted to cluster')
        #     if len(toks) == 2:
        #         runner_id = toks[1].strip().rstrip('.')
        #         break
        # if runner_id is not None:
        #     self.runner_ids[self.mexes[0].mex_id] = runner_id
        # Don't do anything after execute
        return None

    def command_finish(self, **kw):
        """Finish phase run by the condor post-script.

        Reads condor_job_return (passed via named args by command_start) and
        fails the mex when the DAG exited non-zero.
        """
        # Cleanup condor stuff and look for error files.
        topmex = self.mexes[0]
        job_return = int(topmex.named_args.get('condor_job_return', 0))
        #job_return = int(topmex.arguments.pop())
        self.info("condor_finish %s: return=%s", topmex.executable, job_return)
        if job_return != 0:
            if self.session is None:
                # lazily build a session from the named args
                mex_url = topmex.named_args['mex_url']
                token = topmex.named_args['bisque_token']
                self.session = BQSession().init_mex(mex_url, token)
            # Possible look for log files and append to message here
            #if os.path.exists(''):
            #    pass
            self.session.fail_mex(msg='job failed with return code %s' % job_return)
            return None
        topmex.status = "finished"
        return super(CondorRunner, self).command_finish(**kw)

    def command_failed(self, process, retcode):
        """Update the bisque server with a failed command for a mex"""
        mex = process['mex']
        mex.status = "failed"
        command = " ".join(process['command_line'])
        msg = "%s: returned (non-zero) %s" % (command, retcode)
        self.error("condor_failed: " + msg)
        # update process mex; only fail it if not already in a final state
        if self.session is None:
            self.session = BQSession().init_mex(self.mexes[0].mex_url,
                                                self.mexes[0].bisque_token)
        if self.session.mex.value not in ('FAILED', 'FINISHED'):
            self.session.fail_mex(msg)

    def command_kill(self, **kw):
        """Kill the running module if possible

        Finds all condor schedds, then condor_rm's any job whose MexID
        classad matches this mex, and finally fails the mex.
        """
        self.info("Kill On %s", platform.node())
        mex = kw.get('mex_tree')
        topmex = self.mexes[0]
        if mex is not None:
            mex_id = mex.get('resource_uniq')
            # get all condor schedds by parsing 'Name = "..."' lines
            schedd_names = []
            cmd = ['condor_status', '-long', '-schedd']
            pk = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            for line in pk.stdout:
                toks = line.split('=', 1)
                if len(toks) == 2:
                    if toks[0].strip().lower() == 'name':
                        schedd_names.append(toks[1].strip().strip('"').strip("'"))
            self.debug("schedds found: %s" % schedd_names)
            pk.communicate()
            if pk.returncode != 0:
                self.debug("condor_status failed")
                process = dict(command_line=cmd, mex=topmex)
                self.command_failed(process, pk.returncode)
                return None
            # for each one: condor_rm with condition "mexid == <mexid>"
            for schedd_name in schedd_names:
                cmd = ['condor_rm', '-name', schedd_name, '-constraint',
                       'MexID =?= "%s"' % mex_id]
                self.debug("running %s", cmd)
                pk = subprocess.Popen(cmd, cwd=topmex.get('staging_path'),
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
                message, err = pk.communicate()
                self.info("condor_rm %s status = %s message = %s err = %s" %
                          (schedd_name, pk.returncode, message, err))
                if pk.returncode != 0:
                    self.debug("condor_rm failed")
                    process = dict(command_line=cmd, mex=topmex)
                    self.command_failed(process, pk.returncode)
                    return None
            if self.session is None:
                mex_url = topmex.named_args['mex_url']
                token = topmex.named_args['bisque_token']
                self.session = BQSession().init_mex(mex_url, token)
            self.session.fail_mex(msg='job stopped by user')
        else:
            self.debug("No mex provided")
        return None

    def command_status(self, **kw):
        """Log the condor queue state for this mex's staging path."""
        message = subprocess.Popen(['condor_q', ],
                                   cwd=self.mexes[0].get('staging_path'),
                                   stdout=subprocess.PIPE).communicate()[0]
        self.info("status = %s ", message)
        return None
class SeedSize(object): def setup(self): if not os.path.exists(self.images): os.makedirs(self.images) self.bq.update_mex('initializing') if self.is_dataset: results = fetch_dataset(self.bq, self.resource_url, self.images) else: results = fetch_image_pixels(self.bq, self.resource_url, self.images) with open(self.image_map_name, 'wb') as f: pickle.dump(results, f) return 0 def start(self): self.bq.update_mex('executing') # Matlab requires trailing slash r = subprocess.call([EXEC, 'images/']) return r def teardown(self): with open(self.image_map_name, 'rb') as f: self.url2file = pickle.load(f) # self.file2url = dict((v,k) for k,v in self.url2file.iteritems()) summary = os.path.join(self.images, 'summary.csv') if not os.path.exists (summary): self.bq.fail_mex (msg = "did not find any seeds: missing %s" % summary) return 0 summary_tags = self._read_summary(summary) # Post all submex for files and return xml list of results tags = [] gobjects = [] submexes = [] if not self.is_dataset: localfiles = glob.glob(os.path.join(self.images, '*C.csv')) gobs = self._read_results(localfiles[0]) #tags = [{ 'name':'image_url', 'value' : self.resource_url}] tags = [{ 'name': 'outputs', 'tag' : [{'name': 'Summary', 'tag' : summary_tags} , {'name': 'seed-resource', 'type':'image', 'value':self.resource_url, 'gobject' : [{ 'name': 'seeds', 'type' : 'seedsize', 'gobject':gobs }], }], }] else: submexes = self._get_submexes() tags = [ { 'name': 'execute_options', 'tag' : [ {'name': 'iterable', 'value' : 'image_url' } ] }, { 'name': 'outputs', 'tag' : [{'name': 'Summary', 'tag' : summary_tags }, {'name': 'mex_url', 'value': self.mex_url, 'type': 'mex'}, {'name': 'image_url', 'type':'dataset', 'value':self.resource_url,}] }, ] # for i, submex in enumerate(mexlist): # tag, image_url = gettag(submex, 'image_url') # gob, gob_url = gettag(submex, 'SeedSize') # mexlink = { 'name' : 'submex', # 'tag' : [{ 'name':'mex_url', 'value':submex.get('uri')}, # { 'name':'image_url', 'value' : image_url}, # 
{ 'name':'gobject_url', 'value' : gob.get('uri') } ] # } # tags.append(mexlink) self.bq.finish_mex(tags = tags, gobjects = gobjects, children= [('mex', submexes)]) return 0 def _get_submexes(self): submex = [] localfiles = glob.glob(os.path.join(self.images, '*C.csv')) result2url = dict( (os.path.splitext(f)[0] + 'C.csv', u) for f, u in self.file2url.items()) for result in localfiles: gobs = self._read_results(result) if result not in result2url: logging.error ("Can't find url for %s given files %s and map %s" % result, localfiles, result2url) mex = { 'type' : self.bq.mex.type, 'name' : self.bq.mex.name, 'value': 'FINISHED', 'tag': [ { 'name': 'inputs', 'tag' : [ {'name': 'image_url', 'value' : result2url [result] } ] }, { 'name': 'outputs', 'tag' : [{'name': 'seed-resource', 'type':'image', 'value': result2url [result], 'gobject':{ 'name': 'seeds', 'type': 'seedsize', 'gobject': gobs}, }] }] } submex.append (mex) return submex #url = self.bq.service_url('data_service', 'mex', query={ 'view' : 'deep' }) #response = self.bq.postxml(url, d2xml({'request' : {'mex': submex}} )) #return response def _read_summary(self, csvfile): #%mean(area), mean(minoraxislen), mean(majoraxislen), standarddev(area), #standarddev(minoraxislen), standarddev(majoraxislen), total seedcount, #mean thresholdused, weighted mean of percentclusters1, weighted mean of percentclusters2 f= open(csvfile,'rb') rows = csv.reader (f) tag_names = [ 'mean_area', 'mean_minoraxis', 'mean_majoraxis', 'std_area', 'std_minoraxis', 'std_majoraxis', 'seedcount', 'mean_threshhold', 'weighted_mean_cluster_1','weighted_mean_cluster_2', ] # Read one row(rows.next()) and zip ( name, col) unpacking in d2xml format summary_tags = [ { 'name': n[0], 'value' : n[1] } for n in itertools.izip(tag_names, rows.next()) ] f.close() return summary_tags def _read_results(self, csvfile): results = [] f= open(csvfile,'rb') rows = csv.reader (f) for col in rows: results.append( { 'type' : 'seed', 'tag' : [ { 'name': 'area', 
'value': col[0]}, { 'name': 'major', 'value': col[2]}, { 'name': 'minor', 'value': col[1]} ], 'ellipse' : { 'vertex' : [ { 'x': col[3], 'y':col[4], 'index':0 }, { 'x': float(col[3]) - abs(float(col[8]) - float(col[3])), 'y':col[9], 'index':1 }, { 'x': col[6], 'y':col[7], 'index':2 }] } }) f.close() return results def run(self): logging.basicConfig(level=logging.DEBUG) parser = optparse.OptionParser() parser.add_option('-d','--debug', action="store_true") parser.add_option('-n','--dryrun', action="store_true") #parser.add_option('--resource_url') #parser.add_option('--mex_url') #parser.add_option('--staging_path') #parser.add_option('--bisque_token') #parser.add_option('--credentials') # Parse named arguments from list (options, args) = parser.parse_args() named_args =dict( [ y for y in [ x.split ('=') for x in args ] if len (y) == 2] ) args = [ x for x in args if '=' not in x ] staging_path = '.' self.auth_token = named_args.get ('bisque_token') self.image_map_name = os.path.join(staging_path, IMAGE_MAP) self.resource_url = named_args.get ('image_url') self.mex_url = named_args.get ('mex_url') self.images = os.path.join(staging_path, 'images') + os.sep if self.auth_token: self.bq = BQSession().init_mex(self.mex_url, self.auth_token) else: user,pwd = options.credentials.split(':') self.bq = BQSession().init_local(user,pwd) resource_xml = self.bq.fetchxml (self.resource_url, view='short') self.is_dataset = resource_xml.tag == 'dataset' if len(args) == 1: commands = [ args.pop(0)] else: commands =['setup','start', 'teardown'] #if command not in ('setup','teardown', 'start'): # parser.error('Command must be start, setup or teardown') # maltab code requires trailing slash.. try: for command in commands: command = getattr(self, command) r = command() except Exception, e: logging.exception ("problem during %s" % command) self.bq.fail_mex(msg = "Exception during %s: %s" % (command, str(e))) sys.exit(1) sys.exit(r)
class CommandRunner(BaseRunner):
    """Small extension to BaseRunner to actually execute the script.
    """
    name = "command"

    def __init__(self, **kw):
        super(CommandRunner, self).__init__(**kw)
        self.log = logging.getLogger("bq.engine_service.command_run")

    def read_config(self, **kw):
        super(CommandRunner, self).read_config(**kw)
        self.debug("CommandRunner: read_config")
        self.load_section('command', self.module_cfg)  # Runner's name

    def setup_environments(self, **kw):
        """Assign a per-mex log file name (topmex.log or <exe>.log)."""
        super(CommandRunner, self).setup_environments(**kw)
        for mex in self.mexes:
            if not mex.executable:
                mex.log_name = os.path.join(mex.rundir, "topmex.log")
            else:
                mex.log_name = os.path.join(mex.rundir, "%s.log" % mex.executable[0])

    def command_single_entrypoint(self, entrypoint, callback, **kw):
        "Execute specific entrypoint"
        mex = self.mexes[0]  # topmex
        command_line = list(self.entrypoint_executable)
        # add entrypoint to command_line
        command_line += ['--entrypoint', entrypoint]
        # enclose options that start with '-' in quotes to handle numbers properly (e.g., '-3.3')
        command_line = [tok if tok.startswith('--') or not tok.startswith('-')
                        else '"%s"' % tok for tok in command_line]
        rundir = mex.get('rundir')
        if self.options.dryrun:
            self.info("DryRunning '%s' in %s" % (' '.join(command_line), rundir))
        else:
            self.info("running '%s' in %s" % (' '.join(command_line), rundir))
            proc = dict(command_line=command_line,
                        logfile=mex.log_name,
                        rundir=rundir,
                        mex=mex,
                        env=self.process_environment,
                        entrypoint_callback=callback,
                        entrypoint_kw=kw)
            #from bq.engine.controllers.execone import execone
            #retcode = execone (proc)
            #if retcode:
            #    self.command_failed(proc, retcode)
            self.pool.schedule(proc,
                               success=self.entrypoint_success,
                               fail=self.command_fail)
        return None

    def entrypoint_success(self, proc):
        """Pool success callback: fire the caller-supplied entrypoint callback."""
        self.info("Entrypoint Success for %s", proc)
        if 'entrypoint_callback' in proc:
            proc['entrypoint_callback'](**proc['entrypoint_kw'])

    def command_execute(self, **kw):
        "Execute the commands locally specified the mex list"
        self.execute_kw = kw
        self.processes = []
        for mex in self.mexes:
            if not mex.executable:
                self.info('skipping mex %s ' % mex)
                continue
            command_line = list(mex.executable)
            #command_line.extend (mex.arguments)
            # enclose options that start with '-' in quotes to handle numbers properly (e.g., '-3.3')
            command_line = [tok if tok.startswith('--') or not tok.startswith('-')
                            else '"%s"' % tok for tok in command_line]
            rundir = mex.get('rundir')
            if self.options.dryrun:
                self.info("DryRunning '%s' in %s" % (' '.join(command_line), rundir))
                continue
            self.info("running '%s' in %s" % (' '.join(command_line), rundir))
            self.info('mex %s ' % mex)
            self.processes.append(dict(command_line=command_line,
                                       logfile=mex.log_name,
                                       rundir=rundir,
                                       mex=mex,
                                       status='waiting',
                                       env=self.process_environment))
        for p in self.processes:
            self.pool.schedule(p, success=self.command_success, fail=self.command_fail)
        return None
        # ****NOTE****
        # execone must be in engine_service as otherwise multiprocessing is
        # unable to find it.  I have no idea why not.
        # from bq.engine.controllers.execone import execone
        # if self.pool:
        #     log.debug ('Using async ppool %s with %s ' % (self.pool, self.processes))
        #     #self.pool.map_async(fun, [1,2], callback = self.command_return)
        #     self.pool.map_async(execone, self.processes, callback = self.command_return)
        # else:
        #     for p in self.processes:
        #         retcode = execone (p)
        #         if retcode:
        #             self.command_failed(p, retcode)
        #     return self.command_finish
        #return None

    def command_success(self, proc):
        "collect return values when mex was executed asynchronously "
        self.info("SUCCESS Command %s with %s",
                  " ".join(proc.get('command_line')), proc.get('return_code'))
        self.check_pool_status(proc, 'finished')

    def command_fail(self, process):
        """Update the bisque server with a failed command for a mex"""
        command = " ".join(process['command_line'])
        retcode = process['return_code']
        exc = process.get('with_exception', None)
        msg = "FAILED %s: returned %s" % (command, retcode)
        if exc is not None:
            msg = "%s: exception %s" % (msg, repr(exc))
        process['fail_message'] = msg
        self.check_pool_status(process, 'failed')

    def command_kill(self, **kw):
        """Kill the running module if possible
        """
        mex = kw.get('mex_tree')
        topmex = self.mexes[0]
        if mex is not None:
            mex_id = mex.get('resource_uniq')
            # select pool tasks whose mex_url contains this resource uniq
            self.pool.kill(selector_fct=lambda task: '00-' + task['mex'].get(
                'mex_url').split('/00-', 1)[1].split('/', 1)[0] == mex_id)
            if self.session is None:
                mex_url = topmex.named_args['mex_url']
                token = topmex.named_args['bisque_token']
                self.session = BQSession().init_mex(mex_url, token)
            self.session.fail_mex(msg='job stopped by user')
        else:
            self.debug("No mex provided")
        return None

    def check_pool_status(self, p, status):
        """Record a process's final status; when all are done, finish or fail.

        Called from pool callbacks under the pool lock to avoid racing
        concurrent completions.
        """
        # Check that all have finished or failed
        with self.pool.pool_lock:
            p['status'] = status
            all_status = [proc['status'] for proc in self.processes]
            for st in all_status:
                if st not in ('finished', 'failed'):
                    return
            self.info("All processes have returned %s", all_status)
            # all are done.. so check if we finished correctly
            if 'failed' not in all_status:
                self.mexes[0].status = 'finished'
                self.command_finish(**self.execute_kw)
                return
            # there was a failure:
            # BUGFIX: the filter compared against 'fail', a status that is
            # never assigned (only 'finished'/'failed'), so the failure
            # message was always empty.
            msg = '\n'.join(proc.get('fail_message')
                            for proc in self.processes
                            if proc['status'] == 'failed')
            self.mexes[0].status = 'failed'
            self.error(msg)
            if self.session is None:
                self.session = BQSession().init_mex(self.mexes[0].mex_url,
                                                    self.mexes[0].bisque_token)
            if self.session.mex.value not in ('FAILED', 'FINISHED'):
                self.session.fail_mex(msg)
class RootTip(object): def setup(self): #if not os.path.exists(self.images): # os.makedirs(self.images) self.status = 0 self.bq.update_mex('initializing') results = fetch_image_planes(self.bq, self.resource_url, '.') def start(self): self.bq.update_mex('executing') # Matlab requires trailing slash self.status = subprocess.call([EXEC, './']) def teardown(self): # Post all submex for files and return xml list of results self.bq.update_mex('checking results') if self.status != 0: self.bq.fail_mex ("Bad result code form analysis: %d" % self.status) return gobjects = self._read_results() tags = [{ 'name': 'outputs', 'tag' : [{'name': 'roots', 'type':'image', 'value':self.resource_url, 'gobject' : [{ 'name': 'root_tips', 'type': 'root_tips', 'gobject' : gobjects }] }] }] self.bq.update_mex('saving results') self.bq.finish_mex(tags = tags) def _read_results(self, ): results = [] image = self.bq.load(self.resource_url, view='full') xmax, ymax, zmax, tmax, ch = image.geometry() tips = csv.reader(open('tips.csv','rb')) angles = csv.reader(open('angle.csv','rb')) grates = csv.reader(open('gr.csv','rb')) for index, (tip, angle, gr) in enumerate(itertools.izip(tips, angles, grates)): results.append({ 'type' : 'tipangle', 'tag' : [{ 'name': 'angle', 'value': angle[0]}, { 'name': 'growth', 'value': gr[0]}, ], 'point' : { 'vertex' : [ { 'x': str(xmax - int(tip[1])), 'y':tip[0], 't':index } ] , } }) return results def run(self): parser = optparse.OptionParser() parser.add_option('-d','--debug', action="store_true") parser.add_option('-n','--dryrun', action="store_true") parser.add_option('--credentials') parser.add_option('--image_url') (options, args) = parser.parse_args() named = AttrDict (bisque_token=None, mex_url=None, staging_path=None, image_url=None) for arg in list(args): tag, sep, val = arg.partition('=') if sep == '=': named[tag] = val args.remove(arg) if named.bisque_token: self.bq = BQSession().init_mex(named.mex_url, named.bisque_token) self.resource_url = 
self.bq.parameter_value ('image_url') elif options.credentials: user,pwd = options.credentials.split(':') self.bq = BQSession().init_local(user,pwd) self.resource_url = named.image_url else: parser.error('need bisque_token or user credential') if self.resource_url is None: parser.error('Need a resource_url') if not args : commands = ['setup', 'start', 'teardown'] else: commands = [ args ] try: for command in commands: command = getattr(self, command) r = command() except Exception, e: logging.exception ("problem during %s" % command) self.bq.fail_mex(msg = "Exception during %s: %s" % (command, e)) sys.exit(1) sys.exit(0)