def main(addrs): parser = profile.add_args() parser.add_argument('genomes', nargs='+') args = parser.parse_args() prof_name, prof = profile.get_from_args(args) cli = RenderClient(addrs['tasks_loprio'], addrs['responses']) gen = iter_genomes(prof, 'out/%s' % prof_name, args.genomes) try: for task in gen: rq = cli.put(task) print ' >', task.id spawn(get_result, cli, task, rq) except KeyboardInterrupt: print 'Interrupt received, flushing' while cli.taskmap: for k, v in cli.taskmap.items(): if not v.getters: cli.taskmap.pop(k) print 'Still waiting on %d tasks...' % len(cli.taskmap) gevent.sleep(3)
def main(addrs): parser = profile.add_args() parser.add_argument("genomes", nargs="+") args = parser.parse_args() prof_name, prof = profile.get_from_args(args) cli = RenderClient(addrs["tasks_loprio"], addrs["responses"]) gen = iter_genomes(prof, "out/%s" % prof_name, args.genomes) try: for task in gen: rq = cli.put(task) print " >", task.id spawn(get_result, cli, task, rq) except KeyboardInterrupt: print "Interrupt received, flushing" while cli.taskmap: for k, v in cli.taskmap.items(): if not v.getters: cli.taskmap.pop(k) print "Still waiting on %d tasks..." % len(cli.taskmap) gevent.sleep(3)
def main(addrs): parser = profile.add_args() parser.add_argument('genomes', nargs='+') args = parser.parse_args() prof_name, prof = profile.get_from_args(args) cli = RenderClient(addrs['tasks_loprio'], addrs['responses']) gen = iter_genomes(prof, 'out/%s' % prof_name, args.genomes) try: for task in gen: rq = cli.put(task) print ' >', task.id spawn(get_result, cli, task, rq) gevent.sleep(0) except KeyboardInterrupt: print 'Interrupt received, flushing' while cli.taskmap: for k, v in cli.taskmap.items(): if not v.getters: cli.taskmap.pop(k) print 'Still waiting on %d tasks...' % len(cli.taskmap) gevent.sleep(3)
# Command-line options for the single-node render entry point.
# NOTE(review): 'parser' is created before this visible region — confirm
# it is an argparse.ArgumentParser configured upstream.
parser.add_argument('flame', metavar='ID', type=str,
        help="Filename or flame ID of genome to render")
parser.add_argument('-g', action='store_true', dest='gfx',
        help="Show output in OpenGL window")
parser.add_argument('-n', metavar='NAME', type=str, dest='name',
        help="Prefix to use when saving files (default is basename of input)")
parser.add_argument('--suffix', metavar='NAME', type=str, dest='suffix',
        help="Suffix to use when saving files (default '')", default='')
parser.add_argument('-o', metavar='DIR', type=str, dest='dir',
        help="Output directory", default='.')
parser.add_argument('--resume', action='store_true', dest='resume',
        help="Don't overwrite output files that are newer than the input")
parser.add_argument('--pause', action='store_true',
        help="Don't close the preview window after rendering is finished")
parser.add_argument('-d', '--genomedb', metavar='PATH', type=str,
        help="Path to genome database (file or directory, default '.')",
        default='.')
parser.add_argument('--subdir', action='store_true',
        help="Use basename as subdirectory of out dir, instead of prefix")
parser.add_argument('--raw', metavar='PATH', type=str, dest='rawfn',
        help="Target file for raw buffer, to enable previews.")
parser.add_argument('--half', action='store_true',
        help='Use half-loops when converting nodes to animations')
parser.add_argument('--print', action='store_true',
        help="Print the blended animation and exit.")
# Add the shared profile options, then parse and hand off to main().
profile.add_args(parser)
args = parser.parse_args()
pname, prof = profile.get_from_args(args)
main(args, prof)
def dispatch(args):
    """Farm render jobs out to a pool of local/remote worker processes.

    Reads the worker list from args.worker (or ~/.cuburn-workers), fills
    a bounded job queue from the genome database, and runs one greenlet
    per worker that streams a JSON job description to the worker's stdin
    and copies output files back from its stdout.  Failed jobs are
    requeued up to three times; workers that fail four times are retired.
    """
    pname, prof = profile.get_from_args(args)
    workers = args.worker
    if not workers:
        # Fall back to the per-user worker file; best-effort, so any
        # error is printed and otherwise ignored.
        try:
            with open(os.path.expanduser('~/.cuburn-workers')) as fp:
                workers = filter(None, fp.read().split())
        except:
            traceback.print_exc()
            pass
    if not workers:
        print >> sys.stderr, ('No workers defined. Pass --workers or set up '
                '~/.cuburn-workers with one worker per line.')
        sys.exit(1)
    gdb = db.connect(args.genomedb)
    # Bounded at 5 so the filler greenlet stays only a few jobs ahead of
    # the workers; JoinableQueue supports the task_done()/join() handshake
    # used during shutdown.
    job_queue = gevent.queue.JoinableQueue(5)
    active_job_group = gevent.pool.Group()

    def fill_jobs():
        # Expand each flame argument ('@file' means one ID per line in
        # that file) into render jobs, enqueued with retry count 0.
        for oid in args.flames:
            ids = [oid]
            if oid[0] == '@':
                with open(oid[1:]) as fp:
                    ids = fp.read().split('\n')
            for id in ids:
                gnm, basename = gdb.get_anim(id)
                gprof = profile.wrap(prof, gnm)
                for name, times in profile.enumerate_jobs(gprof, basename,
                        args, resume=True):
                    job_queue.put(Job(gnm, name, times, 0))
    job_filler = gevent.spawn(fill_jobs)

    def connect_to_worker(addr):
        # addr is 'host/device'.  Local workers are launched directly;
        # remote ones run over ssh with exponential-backoff reconnect
        # (5s doubling up to 600s).  Both must announce ready_str first.
        host, device = addr.split('/')
        if host == 'localhost':
            distribute_path = os.path.expanduser('~/.cuburn_dist/distribute.py')
            # NOTE(review): this local 'args' shadows dispatch()'s
            # 'args' parameter.
            args = [distribute_path, 'work', '--device', str(device)]
            subp = subprocess.Popen(args, stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE)
            assert read_str(subp.stdout) == ready_str
        else:
            connect_timeout = 5
            while True:
                try:
                    subp = subprocess.Popen(
                            ['ssh', host, '.cuburn_dist/distribute.py',
                             'work', '--device', str(device)],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
                    assert read_str(subp.stdout) == ready_str
                    break
                except:
                    traceback.print_exc()
                    gevent.sleep(connect_timeout)
                    connect_timeout = min(600, connect_timeout * 2)
        return subp

    exiting = False
    worker_failure_counts = {}

    def run_job(addr):
        # Connect, take one job, and run it in a background greenlet.
        # 'evt' is set either when the worker's encoder starts closing
        # (so the caller may pipeline the next job) or when the job ends.
        worker = connect_to_worker(addr)
        job = job_queue.get()
        evt = gevent.event.Event()
        def _run_job():
            try:
                if job is None:
                    # Shutdown sentinel: tell the worker to exit cleanly.
                    write_str(worker.stdin, done_str)
                    worker.stdin.close()
                    return
                job_desc = dict(profile=prof, genome=job.genome,
                        times=list(job.times), name=job.name)
                write_str(worker.stdin, json.dumps(job_desc))
                worker.stdin.close()
                while True:
                    msg_name = read_str(worker.stdout)
                    if msg_name == closing_encoder_str:
                        evt.set()
                    elif msg_name == output_file_str:
                        # Stream to a '.tmp' sibling, then rename, so a
                        # reader never observes a partially-written file.
                        filename = job.name + read_str(worker.stdout)
                        with open(filename + '.tmp', 'w') as fp:
                            copy_filelike(worker.stdout, fp)
                        os.rename(filename + '.tmp', filename)
                    else:
                        assert msg_name == done_str, 'no known event ' + msg_name
                        break
                worker_failure_counts[addr] = 0
            except:
                print >> sys.stderr, traceback.format_exc()
                worker_failure_counts[addr] = worker_failure_counts.get(addr, 0) + 1
                # Requeue unless this job has already failed three times.
                if job.retry_count < 3:
                    job_queue.put(Job(job.genome, job.name, job.times,
                            job.retry_count + 1))
            finally:
                job_queue.task_done()
                evt.set()
        greenlet = gevent.spawn(_run_job)
        active_job_group.add(greenlet)
        return greenlet, evt

    def run_worker(addr):
        # (py2) .get() yields None before the first result; None < 4 is
        # True under Python 2 comparison rules, so new workers run.
        while worker_failure_counts.get(addr) < 4 and not exiting:
            greenlet, evt = run_job(addr)
            evt.wait()

    worker_group = gevent.pool.Group()
    for addr in workers:
        worker_group.spawn(run_worker, addr)
    job_filler.join()
    # Flush all outstanding jobs and, possibly, retries
    # NOTE(review): JoinableQueue.join() returns None, so this 'while'
    # body never executes; the call still blocks until the queue drains
    # once.  Confirm whether retries queued during that final drain were
    # meant to be awaited by the loop body.
    while job_queue.join():
        active_job_group.join()
        if job_queue.empty():
            break
    # Close the remaining workers
    exiting = True
    map(job_queue.put, [None] * len(worker_group))
    worker_group.join()
type=str, dest='rawfn',
        help="Target file for raw buffer, to enable previews.")
# NOTE(review): the fragment above is the tail of a parser.add_argument
# call whose opening tokens lie before this region — preserved as-is.
parser.add_argument('--half', action='store_true',
        help='Use half-loops when converting nodes to animations')
parser.add_argument('--print', action='store_true',
        help="Print the blended animation and exit.")
parser.add_argument('--list-devices', action='store_true',
        help="List devices and exit.")
parser.add_argument('--device', metavar='NUM', type=int,
        help="GPU device number to use (may differ from nvidia-smi).")
parser.add_argument('--keep', action='store_true',
        help="Keep compiled kernels to help with profiling")
# Shared profile options, then parse.  '--list-devices' short-circuits
# before any profile lookup or rendering happens.
profile.add_args(parser)
args = parser.parse_args()
if args.list_devices:
    list_devices()
else:
    pname, prof = profile.get_from_args(args)
    main(args, prof)
def _get_profile(self, args=None):
    """Parse *args* (defaults to an empty argument list) with the
    profile argument parser and return profile.get_from_args' result."""
    parsed = profile.add_args().parse_args(args if args else [])
    return profile.get_from_args(parsed)