def graph_animation(job_list, context, dirname="compmake-graph-animation",
                    dpi=150, width=900, height=900, label='function'):
    """ Runs a step-by-step animation.

        Registers the handlers. Then call 'make' or 'parmake'.

        :param job_list: jobs to visualize (materialized into a list).
        :param context: compmake context (currently unused here).
        :param dirname: output directory for the animation frames.
        :param dpi: resolution of each frame.
        :param width, height: frame size in pixels.
        :param label: node label mode; one of 'none', 'id', 'function'.
        :raises ValueError: if ``label`` is not a recognized mode.
    """
    possible = ['none', 'id', 'function']
    # Validate before touching any global state ('x not in y' is the
    # idiomatic form of 'not x in y').
    if label not in possible:
        msg = 'Invalid label method %r not in %r.' % (label, possible)
        raise ValueError(msg)

    # Stash parameters on the module-level Global holder; the event
    # handler (update_graph) reads them when each frame is rendered.
    Global.dirname = dirname
    Global.job_list = list(job_list)
    Global.graph_params = dict(filter='dot', format='png', label=label,
                               color=True, cluster=True)
    Global.dpi = dpi
    Global.size = (width, height)
    Global.processing = set()

    # Redraw the graph whenever the manager reports progress.
    events = ['manager-job-starting', 'manager-job-failed',
              'manager-job-succeeded', 'manager-succeeded', 'manager-phase']
    for e in events:
        register_handler(e, update_graph)
def graph_animation(job_list, context, dirname="compmake-graph-animation",
                    dpi=150, width=900, height=900, label='function'):
    """ Runs a step-by-step animation.

        Registers the handlers. Then call 'make' or 'parmake'.

        Raises ValueError if ``label`` is not one of
        'none', 'id', 'function'.
    """
    possible = ['none', 'id', 'function']
    # Fail fast on a bad label mode, using the idiomatic
    # 'x not in y' rather than 'not x in y'.
    if label not in possible:
        msg = 'Invalid label method %r not in %r.' % (label, possible)
        raise ValueError(msg)

    # Record the rendering parameters on Global for the handlers to use.
    Global.dirname = dirname
    Global.job_list = list(job_list)
    Global.graph_params = dict(filter='dot', format='png', label=label,
                               color=True, cluster=True)
    Global.dpi = dpi
    Global.size = (width, height)
    Global.processing = set()

    # Any of these manager events triggers a redraw of the graph.
    events = [
        'manager-job-starting',
        'manager-job-failed',
        'manager-job-succeeded',
        'manager-succeeded',
        'manager-phase',
    ]
    for e in events:
        register_handler(e, update_graph)
def __init__(self):
    # Subscribe to the progress events this tracker visualizes.
    subscriptions = [
        ("job-progress", self.event_job_progress),
        ("job-progress-plus", self.event_job_progress_plus),
        ("manager-progress", self.event_manager_progress),
    ]
    for event_name, callback in subscriptions:
        register_handler(event_name, callback)

    # Job-id sets mirroring the manager's bookkeeping.
    for attr in ('processing', 'targets', 'all_targets', 'todo',
                 'failed', 'ready', 'done'):
        setattr(self, attr, set())

    # Status of jobs in "processing" state (latest payload per job).
    self.status = {}
    self.status_plus = {}
'''This plugin dumps all events received'''
import sys
import time

from compmake.events.registrar import register_handler

# Save the streams now, because they will be redirected during
# job execution.
stream = sys.stderr
other_stream = sys.stdout


def print_event(event):
    ''' Writes one received event (and its age) to stderr. '''
    other_stream.flush()
    elapsed = time.time() - event.timestamp
    line = '%.3fs ago: %s: %s\n' % (elapsed, event.name, event.kwargs)
    stream.write(line)
    stream.flush()


# Dump every event, whatever its name.
register_handler("*", print_event)
''' Implements the initial and final banner '''
from compmake.events.registrar import register_handler
from compmake.utils.visualization import colored
from compmake.jobs.storage import all_jobs, get_namespace
from compmake import version

compmake_url = 'http://compmake.org'
compmake_issues_url = 'http://compmake.org'

banner = "Tame your Python computations!"
banner2 = ""


def console_starting(event):  # @UnusedVariable
    ''' Prints the welcome banner when the console starts. '''
    # NOTE: converted from Python-2 print statements to print() calls
    # for Python 3 compatibility (output is unchanged).
    print("%s %s -- ``%s,, -- %s " % (
        colored('Compmake', attrs=['bold']), version, banner, banner2))
    print("Welcome to the compmake console. "
          "(write 'help' for a list of commands)")
    njobs = len(list(all_jobs()))
    print("%d jobs loaded; using namespace '%s'." % (njobs, get_namespace()))


def console_ending(event):  # @UnusedVariable
    ''' Prints the goodbye message when the console exits. '''
    print("Thanks for using compmake. Problems? Suggestions? "
          "Praise? Go to %s" % colored(compmake_issues_url, attrs=['bold']))


register_handler('console-starting', console_starting)
register_handler('console-ending', console_ending)
def parmake_job2(args):
    """
    args = tuple job_id, context, queue_name, show_events

    Returns a dictionary with fields "user_object", "new_jobs",
    'delete_jobs'. "user_object" is set to None because we do not want
    to load in our thread if not necessary. Sometimes it is necessary
    because it might contain a Promise.
    """
    job_id, context, event_queue_name, show_output = args  # @UnusedVariable
    check_isinstance(job_id, str)
    check_isinstance(event_queue_name, str)
    from .pmake_manager import PmakeManager

    event_queue = PmakeManager.queues[event_queue_name]

    db = context.get_compmake_db()

    setproctitle('compmake:%s' % job_id)

    class G:
        # Counts events dropped because the interprocess queue was full.
        nlostmessages = 0

    try:
        # We register a handler for the events to be passed back
        # to the main process.
        def handler(event):
            try:
                if not CompmakeConstants.disable_interproc_queue:
                    event_queue.put(event, block=False)
            except Full:
                G.nlostmessages += 1
                # Do not write messages here, it might create a recursive
                # problem.
                # sys.stderr.write('job %s: Queue is full, message is lost.\n'
                # % job_id)

        remove_all_handlers()

        if show_output:
            register_handler("*", handler)

        def proctitle(event):
            # Mirror job progress in the process title for ps/top.
            stat = '[%s/%s %s] (compmake)' % (event.progress, event.goal,
                                              event.job_id)
            setproctitle(stat)

        register_handler("job-progress", proctitle)

        publish(context, 'worker-status', job_id=job_id, status='started')

        # Note that this function is called after the fork.
        # All data is conserved, but resources need to be reopened.
        try:
            db.reopen_after_fork()  # @UndefinedVariable
        except Exception:
            # Best effort: not all db backends need (or support) reopening.
            # (Was a bare 'except:', which would also swallow
            # KeyboardInterrupt.)
            pass

        publish(context, 'worker-status', job_id=job_id, status='connected')

        res = make(job_id, context=context)

        publish(context, 'worker-status', job_id=job_id, status='ended')

        res['user_object'] = None
        result_dict_check(res)
        return res

    except KeyboardInterrupt:
        assert False, 'KeyboardInterrupt should be captured by make() (' \
                      'inside Job.compute())'
    except JobInterrupted:
        publish(context, 'worker-status', job_id=job_id, status='interrupted')
        raise
    except JobFailed:
        raise
    except BaseException:
        # XXX: unexpected error; let it propagate to the manager.
        # (A dead 'except: raise' clause that followed here was
        # unreachable and has been removed.)
        raise
    finally:
        publish(context, 'worker-status', job_id=job_id, status='cleanup')
        setproctitle('compmake-worker-finished %s' % job_id)
def parmake_job2(args):
    """
    args = tuple job_id, context, queue_name, show_events

    Returns a dictionary with fields "user_object", "new_jobs",
    'delete_jobs'. "user_object" is set to None because we do not want
    to load in our thread if not necessary. Sometimes it is necessary
    because it might contain a Promise.
    """
    job_id, context, event_queue_name, show_output = args  # @UnusedVariable
    check_isinstance(job_id, str)
    check_isinstance(event_queue_name, str)
    from .pmake_manager import PmakeManager

    event_queue = PmakeManager.queues[event_queue_name]

    db = context.get_compmake_db()

    setproctitle('compmake:%s' % job_id)

    class G:
        # Counts events dropped because the interprocess queue was full.
        nlostmessages = 0

    try:
        # We register a handler for the events to be passed back
        # to the main process.
        def handler(event):
            try:
                if not CompmakeConstants.disable_interproc_queue:
                    event_queue.put(event, block=False)
            except Full:
                G.nlostmessages += 1
                # Do not write messages here, it might create a recursive
                # problem.
                # sys.stderr.write('job %s: Queue is full, message is lost.\n'
                # % job_id)

        remove_all_handlers()

        if show_output:
            register_handler("*", handler)

        def proctitle(event):
            # Mirror job progress in the process title for ps/top.
            stat = '[%s/%s %s] (compmake)' % (event.progress, event.goal,
                                              event.job_id)
            setproctitle(stat)

        register_handler("job-progress", proctitle)

        publish(context, 'worker-status', job_id=job_id, status='started')

        # Note that this function is called after the fork.
        # All data is conserved, but resources need to be reopened.
        try:
            db.reopen_after_fork()  # @UndefinedVariable
        except Exception:
            # Best effort: not all db backends need (or support) reopening.
            # (Was a bare 'except:', which would also swallow
            # KeyboardInterrupt.)
            pass

        publish(context, 'worker-status', job_id=job_id, status='connected')

        res = make(job_id, context=context)

        publish(context, 'worker-status', job_id=job_id, status='ended')

        res['user_object'] = None
        result_dict_check(res)
        return res

    except KeyboardInterrupt:
        assert False, 'KeyboardInterrupt should be captured by make() (' \
                      'inside Job.compute())'
    except JobInterrupted:
        publish(context, 'worker-status', job_id=job_id, status='interrupted')
        raise
    except JobFailed:
        raise
    except BaseException:
        # XXX: unexpected error; let it propagate to the manager.
        # (A dead 'except: raise' clause that followed here was
        # unreachable and has been removed.)
        raise
    finally:
        publish(context, 'worker-status', job_id=job_id, status='cleanup')
        # Include the job id for consistency with the sibling
        # implementation (it was missing here).
        setproctitle('compmake-worker-finished %s' % job_id)
def main():
    """ Entry point for the compmake command line.

        Parses options, then either imports the user's module (normal
        mode) or attaches to a namespace (--slave mode); finally it
        interprets the commands given on the command line or starts the
        interactive console. Exits the process with an appropriate
        return code in every branch.
    """
    setproctitle('compmake')

    parser = OptionParser(version=version)

    parser.add_option("--slave", action="store_true",
                      default=False, dest="slave",
                      help="[internal] Runs compmake in slave mode.")

    parser.add_option("--redis_events", action="store_true",
                      default=False, dest="redis_events",
                      help="[internal] Relays events using Redis.")

    config_populate_optparser(parser)

    (options, args) = parser.parse_args()

    initialize_backend()

    # We load plugins after we parsed the configuration
    from compmake import plugins  # @UnusedImport

    if options.redis_events:
        if not compmake_config.db == 'redis':  # @UndefinedVariable
            error('Cannot use redis_events without redis.')
            sys.exit(-2)

        from compmake.storage.redisdb import RedisInterface

        # register an handler that will capture all events
        def handler(event):
            RedisInterface.events_push(event)

        remove_all_handlers()
        register_handler("*", handler)

    if not options.slave:
        # XXX make sure this is the default
        set_compmake_status(compmake_status_interactive)

        # TODO: add command namespace
        # TODO: add command "load"

        if not args:
            user_error('I expect at least one parameter (module name)')
            sys.exit(-2)

        module_name = args[0]
        args = args[1:]

        # FIX: '/' in module_name also catches absolute paths
        # (find('/') > 0 missed a leading slash).
        if module_name.endswith('.py') or '/' in module_name:
            warning('You passed a string "%s" which looks like a filename.'
                    % module_name)
            # FIX: strip only the trailing '.py' suffix; the previous
            # replace('.py', '') also mangled interior occurrences.
            if module_name.endswith('.py'):
                module_name = module_name[:-len('.py')]
            module_name = module_name.replace('/', '.')
            warning('However, I need a module name. I will try with "%s".'
                    % module_name)

        set_namespace(module_name)
        compmake.is_it_time = True
        try:
            __import__(module_name)
        except Exception as e:
            error('Error while trying to import module "%s": %s'
                  % (module_name, e))
            traceback.print_exc(file=sys.stderr)
            sys.exit(-5)

        # TODO: BUG: XXX: remove old jobs those in defined_this_section
    else:
        set_compmake_status(compmake_status_slave)

        if not args:
            user_error('I expect at least one parameter (namespace name)')
            sys.exit(-2)

        module_name = args.pop(0)
        set_namespace(module_name)

    if args:
        try:
            # XXX is this redudant?
            # compmake_config.interactive = False
            retcode = interpret_commands(args)
            # print "Exiting with retcode %s" % retcode
            sys.exit(retcode)
        except UserError as e:
            user_error(e)
            sys.exit(-6)
    else:
        retcode = interactive_console()
        sys.exit(retcode)
counter = 0


def console_write(s):
    ''' Writes a line that will be erased. '''
    cols = get_screen_columns()
    # Pad to the full terminal width so the previous line is completely
    # overwritten, then return the cursor to column 0 (no newline).
    # FIX: str.ljust replaces string.ljust, which is Python-2-only
    # (removed from the string module in Python 3); behavior is identical.
    s = s.ljust(cols)
    stream.write(s)
    stream.write('\r')


def job_redefined(event):  # @UnusedVariable
    # Warn (in yellow) that a job was redefined, and why.
    # stream.write('\n')
    stream.write(colored('Redefined %s\r' % event.job_id,
                         'yellow', attrs=['bold']))
    stream.write(colored(event.reason, 'yellow'))
    # stream.write('\n')


def job_defined(event):
    # Show a running count of jobs as they are defined.
    global counter
    counter += 1
    console_write('compmake: defining job #%d %s' % (counter, event.job_id))


register_handler('job-redefined', job_redefined)
register_handler('job-defined', job_defined)

# register_handler('job-already-defined', lambda event:
#     console_write('Confirming job %s' % event.job_id))
x += ["%s/%s" % (frame.iterations[0] + 1, frame.iterations[1])] if level >= 4 and frame.iteration_desc is not None: x += ["(" + frame.iteration_desc + ")"] if i < len(stack) - 1: x += ['>>'] X += ["[" + " ".join(x) + "]" ] return " ".join(X) cols, rows = getTerminalSize() #@UnusedVariable choice = '%d processing' % len(tracker.status) for level in [4, 3, 2, 1, 0]: x = s + get_string(level) if len(x) <= cols: choice = x break choice = string.ljust(choice, cols - 1) stream.write(choice) stream.write('\r') register_handler('manager-progress', handle_event) register_handler('job-progress', handle_event) register_handler('job-progress-plus', handle_event)