def test_locks(self):
    """Exercise parimap workers competing for an exclusive fcntl lock
    on a shared temporary file.

    Each worker must observe the file empty, write its own payload,
    then truncate it again before releasing the lock — any locking
    failure shows up as the ``'' == f.read()`` assertion firing.
    """

    def work(x):
        assert os.path.exists(fn)
        with open(fn, 'a+') as f:
            # Spin until the exclusive lock is acquired.  ENOLCK (no
            # locks available, e.g. on some NFS mounts) is retried;
            # any other IOError is a real failure.
            while True:
                try:
                    fcntl.lockf(f, fcntl.LOCK_EX)
                    break
                except IOError as e:
                    if e.errno == errno.ENOLCK:
                        time.sleep(0.01)
                    else:
                        raise

            f.seek(0)
            assert '' == f.read()
            f.write('%s' % x)
            f.flush()
            # time.sleep(0.01)
            f.seek(0)
            f.truncate(0)
            fcntl.lockf(f, fcntl.LOCK_UN)

    fos, fn = tempfile.mkstemp()  # (dir='/try/with/nfs/mounted/dir')
    # Truncate the file so workers start from a known-empty state.
    # (Context manager replaces the original open()/close() pair.)
    with open(fn, 'w'):
        pass

    for x in parimap(work, range(100), nprocs=10, eprintignore=()):
        pass

    os.close(fos)
    os.remove(fn)
def _iter_parallel_chains(
        draws, step, stage_path, progressbar, model, n_jobs, chains=None):
    """
    Do Metropolis sampling over all the chains with each chain being
    sampled 'draws' times.  Parallel execution according to n_jobs.

    :param draws: number of samples to draw per chain
    :param step: sampler step object providing ``n_chains``,
        ``population`` and ``resampling_indexes``
    :param stage_path: directory where per-chain text traces are written
    :param progressbar: whether the last worker of each block shows a
        progress bar
    :param model: model passed through to the trace backend and workers
    :param n_jobs: number of parallel worker processes
    :param chains: optional explicit list of chain indexes; defaults to
        all of ``step.n_chains``
    """
    if chains is None:
        chains = list(range(step.n_chains))
        idxs = chains
    else:
        idxs = list(range(len(chains)))

    trace_list = []

    display = bool(progressbar)

    # Only the last worker of each block of n_jobs chains may display a
    # progress bar.
    # BUG FIX: the placeholder was the *string* 'False', which is truthy,
    # so every worker would have been flagged to show a progress bar.
    pack_pb = [False for _ in range(n_jobs - 1)] + [display]
    block_pb = []
    list_pb = []

    # NOTE(review): when len(chains) is not divisible by n_jobs, list_pb
    # ends up shorter than the number of chains — confirm against
    # work_chain's indexing of pshared['progressbars'].
    for _ in range(len(chains) // n_jobs):
        block_pb.append(pack_pb)

    # BUG FIX: the original 'map(list_pb.extend, block_pb)' is a lazy
    # iterator under Python 3 and was never consumed, leaving list_pb
    # empty; use an explicit loop so the flags are actually collected.
    for pack in block_pb:
        list_pb.extend(pack)

    logger.info('Initialising chain traces ...')
    for chain in chains:
        trace_list.append(backend.Text(stage_path, model=model))

    logger.info('Sampling ...')
    pshared = dict(
        draws=draws,
        trace_list=trace_list,
        progressbars=list_pb,
        tune=None,
        model=model)

    work = [
        (step, chain, idx, step.population[step.resampling_indexes[chain]])
        for chain, idx in zip(chains, idxs)]

    # Drain the parallel iterator; results are written to the traces as
    # a side effect of work_chain.
    for chain in tqdm(
            parimap.parimap(work_chain, work, pshared=pshared, nprocs=n_jobs),
            total=len(chains)):
        pass
def test_parimap(self):
    """Compare parimap against a serial imap emulation on random
    configurations: results, exception propagation and termination must
    agree step for step.
    """
    # random.seed(0)
    for i in range(50):
        nprocs = random.randint(1, 10)
        nx = random.randint(0, 1000)
        ny = random.randint(0, 1000)
        icrash = random.randint(0, 1000)
        # print 'testing %i %i %i %i...' % (nprocs, nx, ny, icrash)

        def work(x, y):
            if x == icrash:
                raise Crash(str((x, y)))
            a = random.random()
            if a > 0.5:
                time.sleep((a * 0.01)**2)
            return x + y

        # FIX: Python-2-only constructs ('except Crash, e1', xrange,
        # iterator .next()) were syntax/name errors under Python 3;
        # rewritten to the modern equivalents (same as the py3 variant
        # of this test elsewhere in the file).
        I1 = parimap(
            work, range(nx), range(ny), nprocs=nprocs, eprintignore=Crash)
        I2 = imapemulation(work, range(nx), range(ny))

        while True:
            exc1, exc2 = None, None
            res1, res2 = None, None
            end1, end2 = None, None
            try:
                res1 = next(I1)
            except StopIteration:
                end1 = True
            except Crash as e:
                # Capture into an outer name: py3 deletes the 'as'
                # target at the end of the except clause.
                exc1 = e

            try:
                res2 = next(I2)
            except StopIteration:
                end2 = True
            except Crash as e:
                exc2 = e

            assert res1 == res2, str((res1, res2))
            assert type(exc1) == type(exc2)
            assert end1 == end2
            if end1 or end2:
                break
def go(environment, force=False, preserve=False, nparallel=1, status='state'):
    """Process all selected events of *environment*, ``nparallel`` at a
    time, via :func:`process_event` workers."""
    # Register the run parameters in the module-level registry so worker
    # processes can look them up by the tuple's id.
    run_params = (environment, force, preserve, status, nparallel)
    key = id(run_params)
    g_state[key] = run_params

    nevents = environment.nevents_selected

    # Drain the parallel iterator; the work happens in process_event.
    for _ in parimap.parimap(
            process_event,
            range(nevents),
            [key] * nevents,
            nprocs=nparallel):
        pass
def test_parimap(self):
    """Check that parimap behaves exactly like a serial imap emulation
    for random worker counts and input sizes, including crashes and
    end-of-iteration."""
    # random.seed(0)
    for _ in range(50):
        nprocs = random.randint(1, 10)
        nx = random.randint(0, 1000)
        ny = random.randint(0, 1000)
        icrash = random.randint(0, 1000)
        # print 'testing %i %i %i %i...' % (nprocs, nx, ny, icrash)

        def work(x, y):
            if x == icrash:
                raise Crash(str((x, y)))
            a = random.random()
            if a > 0.5:
                time.sleep((a*0.01)**2)
            return x+y

        it_parallel = parimap(
            work, range(nx), range(ny), nprocs=nprocs, eprintignore=Crash)
        it_serial = imapemulation(work, range(nx), range(ny))

        def step(it):
            # Advance one iterator; report (result, exception, ended).
            try:
                return next(it), None, None
            except StopIteration:
                return None, None, True
            except Crash as exc:
                return None, exc, None

        while True:
            res1, exc1, end1 = step(it_parallel)
            res2, exc2, end2 = step(it_serial)

            assert res1 == res2, str((res1, res2))
            assert type(exc1) == type(exc2)
            assert end1 == end2
            if end1 or end2:
                break
def test_parimap(self):
    """Randomized equivalence test: parimap must yield the same values,
    raise the same exception types and stop at the same point as a
    serial imap emulation."""
    # random.seed(0)
    for i in range(50):
        nprocs = random.randint(1, 10)
        nx = random.randint(0, 1000)
        ny = random.randint(0, 1000)
        icrash = random.randint(0, 1000)
        # print 'testing %i %i %i %i...' % (nprocs, nx, ny, icrash)

        def work(x, y):
            if x == icrash:
                raise Crash(str((x, y)))
            a = random.random()
            if a > 0.5:
                time.sleep((a*0.01)**2)
            return x+y

        # FIX: replaced Python-2-only xrange / .next() / 'except X, e'
        # (a syntax error under Python 3) with their py3 forms.
        I1 = parimap(
            work, range(nx), range(ny), nprocs=nprocs, eprintignore=Crash)
        I2 = imapemulation(work, range(nx), range(ny))

        while True:
            e1, e2 = None, None
            r1, r2 = None, None
            end1, end2 = None, None
            try:
                r1 = I1.__next__()
            except StopIteration:
                end1 = True
            except Crash as exc:
                # Re-bind outside the handler: the 'as' name is deleted
                # when the except clause exits in py3.
                e1 = exc

            try:
                r2 = I2.__next__()
            except StopIteration:
                end2 = True
            except Crash as exc:
                e2 = exc

            assert r1 == r2, str((r1, r2))
            assert type(e1) == type(e2)
            assert end1 == end2
            if end1 or end2:
                break
if e.errno == errno.ENOLCK: time.sleep(0.01) pass else: raise f.seek(0) assert '' == f.read() f.write('%s' % x) f.flush() # time.sleep(0.01) f.seek(0) f.truncate(0) fcntl.lockf(f, fcntl.LOCK_UN) f.close() fos, fn = tempfile.mkstemp() # (dir='/try/with/nfs/mounted/dir') f = open(fn, 'w') f.close() for x in parimap(work, xrange(100), nprocs=10, eprintignore=()): pass os.close(fos) os.remove(fn) if __name__ == '__main__': util.setup_logging('test_parimap', 'warning') unittest.main()
def build(cls, store_dir, force=False, nworkers=None, continue_=False,
          step=None, iblock=None):
    """Build (or continue building) all blocks of a Green's function
    store, optionally in parallel.

    :param store_dir: path of the store to build
    :param force: passed to ``store.Store.create_dependants``
    :param nworkers: number of parallel worker processes
    :param continue_: resume a previous, interrupted build using the
        ``.status`` bookkeeping file
    :param step: build only this step (``None``: all steps)
    :param iblock: build only this block; ``-1`` prints the per-block
        commands instead of running them
    """
    if step is None:
        steps = range(cls.nsteps)
    else:
        steps = [step]

    if iblock is not None and step is None and cls.nsteps != 1:
        raise store.StoreError('--step option must be given')

    done = set()
    status_fn = pjoin(store_dir, '.status')

    if not continue_ and iblock in (None, -1) and step in (None, 0):
        store.Store.create_dependants(store_dir, force)

    if iblock is None:
        if not continue_:
            # Fresh build: truncate/create the status file.
            with open(status_fn, 'w') as status:
                pass
        else:
            # Resuming: collect (step, block) pairs already finished.
            try:
                with open(status_fn, 'r') as status:
                    for line in status:
                        done.add(tuple(int(x) for x in line.split()))
            except IOError:
                raise store.StoreError('nothing to continue')

    shared = {}
    for step in steps:
        builder = cls(store_dir, step, shared)
        if not (0 <= step < builder.nsteps):
            raise store.StoreError('invalid step: %i' % (step+1))

        if iblock in (None, -1):
            iblocks = [x for x in builder.all_block_indices()
                       if (step, x) not in done]
        else:
            if not (0 <= iblock < builder.nblocks):
                raise store.StoreError(
                    'invalid block index %i' % (iblock+1))

            iblocks = [iblock]

        if iblock == -1:
            # Dry run: emit one 'fomosto build' command per block.
            for i in iblocks:
                c = ['fomosto', 'build']
                if not os.path.samefile(store_dir, '.'):
                    c.append("'%s'" % store_dir)

                if builder.nsteps != 1:
                    c.append('--step=%i' % (step+1))

                c.append('--block=%i' % (i+1))

                # FIX: print statement was Python-2-only; the call form
                # works on both Python 2 and 3.
                print(' '.join(c))

            return

        del builder

        # Workers handle SIGINT themselves; ignore it in the parent
        # while the pool runs, restoring the old handler afterwards.
        original = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            for x in parimap(
                    cls.__work_block,
                    [(store_dir, step, i, shared) for i in iblocks],
                    nprocs=nworkers,
                    eprintignore=Interrupted):

                # NOTE(review): this rebinds store_dir/step from the
                # worker result (same values echoed back) — confirm
                # against __work_block's return.
                store_dir, step, i = x
                with open(status_fn, 'a') as status:
                    status.write('%i %i\n' % (step, i))

        finally:
            signal.signal(signal.SIGINT, original)

    os.remove(status_fn)
def command_report(args):
    """Handle the 'grond report' subcommand: (re)generate report
    entries for run directories or selected events, rebuild the report
    index/archive, and optionally serve or open the result."""

    import matplotlib
    matplotlib.use('Agg')

    from pyrocko import parimap

    from grond.environment import Environment
    from grond.report import \
        report_index, report_archive, serve_ip, serve_report, read_config, \
        write_config, ReportConfig

    def setup(parser):
        parser.add_option(
            '--index-only',
            dest='index_only',
            action='store_true',
            help='create index only')

        parser.add_option(
            '--serve', '-s',
            dest='serve',
            action='store_true',
            help='start http service')

        parser.add_option(
            '--serve-external', '-S',
            dest='serve_external',
            action='store_true',
            help='shortcut for --serve --host=default --fixed-port')

        parser.add_option(
            '--host',
            dest='host',
            default='localhost',
            help='<ip> to start the http server on. Special values for '
                 '<ip>: "*" binds to all available interfaces, "default" '
                 'to default external interface, "localhost" to '
                 '"127.0.0.1".')

        parser.add_option(
            '--port',
            dest='port',
            type=int,
            default=8383,
            help='set default http server port. Will count up if port is '
                 'already in use unless --fixed-port is given.')

        parser.add_option(
            '--fixed-port',
            dest='fixed_port',
            action='store_true',
            help='fail if port is already in use')

        parser.add_option(
            '--open', '-o',
            dest='open',
            action='store_true',
            help='open report in browser')

        parser.add_option(
            '--config',
            dest='config',
            metavar='FILE',
            help='report configuration file to use')

        parser.add_option(
            '--write-config',
            dest='write_config',
            metavar='FILE',
            help='write configuration (or default configuration) to FILE')

        parser.add_option(
            '--update-without-plotting',
            dest='update_without_plotting',
            action='store_true',
            help='quick-and-dirty update parameter files without plotting')

        parser.add_option(
            '--parallel',
            dest='nparallel',
            type=int,
            default=1,
            # FIX: help text had a comma before a new sentence
            # ('in parallel, If set ...').
            help='set number of runs to process in parallel. '
                 'If set to more than one, --status=quiet is implied.')

        parser.add_option(
            '--threads',
            dest='nthreads',
            type=int,
            default=1,
            # FIX: missing space between the two concatenated string
            # fragments produced '...(default: 1).Set to 0...'.
            help='set number of threads per process (default: 1). '
                 'Set to 0 to use all available cores.')

        parser.add_option(
            '--no-archive',
            dest='no_archive',
            action='store_true',
            help='don\'t create archive file.')

    parser, options, args = cl_parse('report', args, setup)

    s_conf = ''
    if options.config:
        try:
            conf = read_config(options.config)
        except grond.GrondError as e:
            die(str(e))

        s_conf = ' --config="%s"' % options.config
    else:
        from grond import plot
        conf = ReportConfig(
            plot_config_collection=plot.get_plot_config_collection())
        conf.set_basepath('.')

    if options.write_config:
        try:
            write_config(conf, options.write_config)
            sys.exit(0)
        except grond.GrondError as e:
            die(str(e))

    # commandline options that can override config values
    if options.no_archive:
        conf.make_archive = False

    # A single existing report directory may be given instead of rundirs.
    if len(args) == 1 and op.exists(op.join(args[0], 'index.html')):
        conf.report_base_path = conf.rel_path(args[0])
        s_conf = ' %s' % args[0]
        args = []

    report_base_path = conf.expand_path(conf.report_base_path)

    if options.index_only:
        report_index(conf)
        report_archive(conf)
        args = []

    entries_generated = False

    payload = []
    if args and all(op.isdir(rundir) for rundir in args):
        # FIX: removed dead 'all_failed = True' here — the name is
        # always recomputed below before it is read (payload is
        # non-empty whenever this branch runs).
        rundirs = args
        for rundir in rundirs:
            payload.append((
                [rundir], None, conf, options.update_without_plotting,
                options.nthreads))

    elif args:
        try:
            env = Environment(args)
            for event_name in env.get_selected_event_names():
                payload.append((
                    args, event_name, conf,
                    options.update_without_plotting,
                    options.nthreads))

        except grond.GrondError as e:
            die(str(e))

    if payload:
        entries_generated = []
        for result in parimap.parimap(
                make_report, *zip(*payload), nprocs=options.nparallel):

            entries_generated.append(result)

        all_failed = not any(entries_generated)
        entries_generated = any(entries_generated)

        if all_failed:
            die('no report entries generated')

        report_index(conf)
        report_archive(conf)

    if options.serve or options.serve_external:
        if options.serve_external:
            host = 'default'
        else:
            host = options.host

        addr = serve_ip(host), options.port

        serve_report(
            addr,
            report_config=conf,
            fixed_port=options.fixed_port or options.serve_external,
            open=options.open)

    elif options.open:
        import webbrowser
        url = 'file://%s/index.html' % op.abspath(report_base_path)
        webbrowser.open(url)

    else:
        if not entries_generated and not options.index_only:
            logger.info('Nothing to do, see: grond report --help')

    if entries_generated and not (options.serve or options.serve_external):
        logger.info(CLIHints('report', config=s_conf))
if e.errno == errno.ENOLCK: time.sleep(0.01) pass else: raise f.seek(0) assert '' == f.read() f.write('%s' % x) f.flush() #time.sleep(0.01) f.seek(0) f.truncate(0) fcntl.lockf(f, fcntl.LOCK_UN) f.close() fos, fn = tempfile.mkstemp()#(dir='/try/with/nfs/mounted/dir') f = open(fn, 'w') f.close() for x in parimap(work, xrange(100), nprocs=10, eprintignore=()): pass os.close(fos) os.remove(fn) if __name__ == '__main__': util.setup_logging('test_parimap', 'warning') unittest.main()
def build(cls, store_dir, force=False, nworkers=None, continue_=False,
          step=None, iblock=None):
    """Build the store's blocks in parallel, tracking progress in a
    '.status' file so interrupted builds can be continued.

    :param store_dir: path of the store to build
    :param force: forwarded to ``store.Store.create_dependants``
    :param nworkers: worker process count for parimap
    :param continue_: resume from the '.status' bookkeeping file
    :param step: restrict the build to one step (``None``: all)
    :param iblock: restrict to one block; ``-1`` only prints the
        equivalent 'fomosto build' commands
    """
    if step is None:
        steps = range(cls.nsteps)
    else:
        steps = [step]

    if iblock is not None and step is None and cls.nsteps != 1:
        raise store.StoreError('--step option must be given')

    done = set()
    status_fn = pjoin(store_dir, '.status')

    if not continue_ and iblock in (None, -1) and step in (None, 0):
        store.Store.create_dependants(store_dir, force)

    if iblock is None:
        if not continue_:
            # Start a fresh status file.
            with open(status_fn, 'w') as status:
                pass
        else:
            # Load already-completed (step, block) pairs.
            try:
                with open(status_fn, 'r') as status:
                    for line in status:
                        done.add(tuple(int(x) for x in line.split()))
            except IOError:
                raise store.StoreError('nothing to continue')

    shared = {}
    for step in steps:
        builder = cls(store_dir, step, shared)
        if not (0 <= step < builder.nsteps):
            raise store.StoreError('invalid step: %i' % (step + 1))

        if iblock in (None, -1):
            iblocks = [
                x for x in builder.all_block_indices()
                if (step, x) not in done]
        else:
            if not (0 <= iblock < builder.nblocks):
                raise store.StoreError(
                    'invalid block index %i' % (iblock + 1))

            iblocks = [iblock]

        if iblock == -1:
            # Command-listing mode only; nothing is built.
            for i in iblocks:
                c = ['fomosto', 'build']
                if not os.path.samefile(store_dir, '.'):
                    c.append("'%s'" % store_dir)

                if builder.nsteps != 1:
                    c.append('--step=%i' % (step + 1))

                c.append('--block=%i' % (i + 1))

                # FIX: 'print x' statement is invalid syntax on
                # Python 3; print(...) works on both 2 and 3.
                print(' '.join(c))

            return

        del builder

        # Shield the parent from Ctrl-C while workers run; workers see
        # the interrupt and raise Interrupted, which parimap swallows.
        original = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            for x in parimap(
                    cls.__work_block,
                    [(store_dir, step, i, shared) for i in iblocks],
                    nprocs=nworkers,
                    eprintignore=Interrupted):

                # NOTE(review): unpacking rebinds store_dir and the
                # 'step' loop variable from the worker's echo — verify
                # __work_block returns them unchanged.
                store_dir, step, i = x
                with open(status_fn, 'a') as status:
                    status.write('%i %i\n' % (step, i))

        finally:
            signal.signal(signal.SIGINT, original)

    os.remove(status_fn)
def read_nlloc_bulletin(hypfiles, picks=False):
    """Build a pandas DataFrame of all earthquakes in the bulletin data
    set and a Panel whose items are station codes and whose major axis
    holds event IDs.

    :param hypfiles: path/glob to NLLoc hypocenter phase files
        (*.loc.hyp)
    :type hypfiles: str
    :param picks: also collect per-station pick information

    :returns: a data frame of all located events in the catalog and a
        panel of phase information.
    :rtype: pandas.core.frame.DataFrame, pandas.core.panel.Panel

    The catalog data-frame is indexed by event ID with columns
    Lat, Lon, Depth [m], X [m], Y [m], Z [m].  The panel carries a
    2-level MultiIndex on items — (StaCode, Phase) — and its
    data-frames hold 'Residual' and 'TimeCorr' per event ID.
    """
    logger = logging.getLogger('read_nlloc_bulletin')
    logger.setLevel(logging.INFO)

    # NOTE(review): 'basestring' is Python-2-only; under Python 3 this
    # raises NameError — confirm the targeted interpreter.
    if not isinstance(hypfiles, basestring):
        raise InputError(hypfiles, "Need string or buffer")

    hyp_files = sorted(glob.glob(hypfiles))
    n_files = len(hyp_files)
    if n_files == 0:
        raise InputError(
            hypfiles, "cannot access *.hyp : No such file or directory")

    def parse_one(hf):
        # Corrupt files yield None and are skipped by the caller.
        if not is_nlloc_hyp(hf):
            logger.warning("NLLoc HYP file seems corrupt: %s" % hf)
            return
        return read_nlloc_hyp(hf, picks=picks)

    earthquakes = parimap(parse_one, hyp_files)

    catalog_cols = ['Lat', 'Lon', 'Depth', 'X', 'Y', 'Z']
    # NOTE(review): np.float, DataFrame.ix and pd.Panel are removed in
    # modern numpy/pandas — this code targets legacy versions.
    events_df = pd.DataFrame(
        index=range(n_files), columns=catalog_cols, dtype=np.float)
    eq2sta = {}

    # Progress milestones for logger INFO: file counts at 10%..100%.
    milestones = dict(
        zip(map(int, np.arange(0.1, 1.1, 0.1) * n_files),
            np.arange(10, 110, 10)))

    logger.info(" Started scanning ")
    for ieq, eq in enumerate(earthquakes):
        if not eq:
            continue

        events_df.ix[ieq, catalog_cols] = (
            eq.latitude, eq.longitude, eq.depth, eq.X, eq.Y, eq.Z)
        eid = op.basename(eq.obsfile).split('.')[0]
        events_df.rename(index={ieq: eid}, inplace=True)
        if eq.picks is not None:
            eq2sta[eid] = eq.picks[['Residual', 'TimeCorr']].transpose()

        if (ieq + 1) in milestones:
            logger.info(
                "%4.0f%% scanned, %7i files" % (milestones[ieq + 1], ieq + 1))

    logger.info(" Finished scanning ")

    # --- make a Panel for picks ---
    picks_pnl = None
    if eq2sta:
        picks_pnl = pd.Panel.from_dict(
            eq2sta, orient='minor', dtype=np.float)
        picks_pnl = picks_pnl.transpose(0, 2, 1)

    return Catalog(events_df, picks_pnl)