Code example #1 (score: 0)
    def dot(self, b, trunc=0.):
        """Compute the matrix-vector product of this operator with b in parallel.

        :param b: 1-D numpy array, length must equal self.shape[1]
        :param trunc: truncation threshold; <= 0. uses the dense row
            (self.row), > 0. uses the sparse row representation
            (self.sparse_row) keeping only the stored entries
        :return: 1-D numpy array with the product
            NOTE(review): the result is allocated with zeros_like(b),
            i.e. length self.shape[1] -- this implicitly assumes a
            square operator; confirm against the class definition
        """
        # BUGFIX: check the type before calling len()/.ndim on b
        assert isinstance(b, np.ndarray)
        assert b.ndim == 1
        assert len(b) == self.shape[1]
        self_dot_b = np.zeros_like(b)

        def job_generator():
            # one job per output index
            for i in range(len(b)):
                yield Job(i)

        def job_handler(i):
            if trunc <= 0.:
                # dense dot product of row i with b
                return i, (self.row(i) * b).sum()
            else:
                # sparse row: multiply only the stored (col, val) entries
                rows, cols, vals = self.sparse_row(i, trunc=trunc)
                return i, (vals * b[cols]).sum()

        wb = waitbarpipe('dot product')
        with MapAsync(job_handler, job_generator()) as ma:
            for _, (i, v), _, _ in ma:
                self_dot_b[i] = v
                # BUGFIX: normalize progress by the number of jobs (len(b)),
                # not self.shape[0], which differs for non-square operators
                wb.refresh(i / float(len(b)))
            wb.close()

        return self_dot_b
Code example #2 (score: 0)
File: extract.py  Project: inthaliba/srfpython
def extract(argv, verbose, mapkwargs):
    """Extract results from HerrMet run directories (-pdf and/or -top modes)."""
    # reject any option that is neither private nor authorized
    for key in argv.keys():
        if key in ['main', "_keyorder"]:
            continue  # private keys
        if key not in authorized_keys:
            raise ValueError('option %s is not recognized' % key)

    rootnames = argv['main']
    if not len(rootnames):
        # no explicit rootnames: fall back on the default glob pattern
        rootnames = glob.glob(default_rootnames)
    assert len(rootnames)

    # every rootname must be an existing _HerrMet_* directory
    for rootname in rootnames:
        if not os.path.isdir(rootname):
            raise ValueError('%s does not exist' % rootname)
        if not rootname.startswith('_HerrMet_'):
            raise ValueError('%s does not start with _HerrMet_' % rootname)

    if "-pdf" in argv.keys():
        npdfargs = len(argv['-pdf'])
        if npdfargs == 0:
            # no arguments: use the module-level defaults
            extract_mode = default_extract_mode
            extract_limit = default_extract_limit
            extract_llkmin = default_extract_llkmin
            extract_step = default_extract_step
        elif npdfargs == 4:
            (extract_mode, extract_limit,
             extract_llkmin, extract_step) = argv['-pdf']
        else:
            raise ValueError('unexpected number of arguments for option -pdf')

        def gen():
            # one job per run directory
            for rootname in rootnames:
                yield Job(rootname,
                          extract_mode,
                          extract_limit,
                          extract_llkmin,
                          extract_step,
                          verbose,
                          percentiles=[.16, .5, .84],
                          mapkwargs=mapkwargs)

        # drain the asynchronous map; the workers write their own output
        with MapAsync(_extract_pdf, gen(), **mapkwargs) as ma:
            for _ in ma:
                pass

    if "-top" in argv.keys():
        ntopargs = len(argv['-top'])
        if ntopargs == 0:
            # no arguments: use the module-level defaults
            top_limit = default_top_limit
            top_llkmin = default_top_llkmin
            top_step = default_top_step
        elif ntopargs == 3:
            top_limit, top_llkmin, top_step = argv['-top']
        else:
            raise ValueError('unexpected number of arguments for option -top')

        for rootname in rootnames:
            _extract_top(rootname, top_limit, top_llkmin, top_step, verbose)
Code example #3 (score: 0)
def display(argv, verbose, mapkwargs):
    """Display HerrMet results for one or more run directories.

    :param argv: parsed command-line options (dict); 'main' holds rootnames
    :param verbose: verbosity flag forwarded to _display_function
    :param mapkwargs: kwargs forwarded to MapAsync for the parallel -png mode
    :raises Exception: on unrecognized options, missing rootnames or
        unresolvable colormap names
    """
    for k in argv.keys():
        if k in ['main', "_keyorder"]:
            continue  # private keys

        if k not in authorized_keys:
            raise Exception('option %s is not recognized' % k)

    rootnames = argv['main']
    if rootnames == []:
        rootnames = glob.glob(default_rootnames)
    assert len(rootnames)

    # -------------------------------------
    if "-cmap" not in argv.keys():
        argv['-cmap'] = [default_cmap]

    try:
        # first try matplotlib's registry
        argv['-cmap'] = plt.get_cmap(argv['-cmap'][0])
    except ValueError:
        try:
            # SECURITY NOTE: eval on a user-supplied colormap name; the name
            # is restricted to attributes of the local cmaps module in practice
            argv['-cmap'] = eval("cmaps.%s()" % argv['-cmap'][0])
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt
            raise Exception(
                'could not find colormap %s neither in matplotlib neither in srfpython.standalone.utils.cmaps'
                % argv['-cmap'][0])

    # ----------- special case, just show the parameterization file from --param : ./_HerrMet.param
    if len(rootnames) == 1 and rootnames[0] == '.':
        _display_function(".", argv=argv, verbose=verbose, mapkwargs=mapkwargs)

    # ----------- general case
    else:
        for rootname in rootnames:
            if not os.path.isdir(rootname):
                raise Exception('%s does not exist' % rootname)
            elif not rootname.startswith('_HerrMet_'):
                raise Exception('%s does not starts with _HerrMet_' % rootname)

        if "-png" not in argv.keys():
            # display mode, cannot parallelize
            for rootname in rootnames:
                _display_function(rootname,
                                  argv=argv,
                                  verbose=verbose,
                                  mapkwargs=mapkwargs)
        else:
            # -png mode: figures go to files, so rootnames run in parallel

            def gen():
                for rootname in rootnames:
                    yield Job(rootname,
                              argv,
                              verbose=verbose,
                              mapkwargs=mapkwargs)

            with MapAsync(_display_function, gen(), **mapkwargs) as ma:
                for _ in ma:
                    pass
Code example #4 (score: 0)
File: extract.py  Project: junxie01/srfpython
def extract(argv, verbose, mapkwargs):
    """Extract posterior pdfs from HerrMet run directories in parallel."""
    # reject any option that is neither private nor authorized
    for key in argv.keys():
        if key in ['main', "_keyorder"]:
            continue  # private keys
        if key not in authorized_keys:
            raise Exception('option %s is not recognized' % key)

    rootnames = argv['main']
    if rootnames == []:
        # no explicit rootnames: fall back on the default glob pattern
        rootnames = glob.glob(default_rootnames)
    assert len(rootnames)

    # every rootname must be an existing _HerrMet_* directory
    for rootname in rootnames:
        if not os.path.isdir(rootname):
            raise Exception('%s does not exist' % rootname)
        if not rootname.startswith('_HerrMet_'):
            raise Exception('%s does not starts with _HerrMet_' % rootname)

    # -pdf, if provided, must carry exactly 4 arguments
    assert "-pdf" not in argv.keys() or len(
        argv["-pdf"]) == 4  # unexpected argument number
    if "-pdf" not in argv.keys():
        # option absent: use the module-level defaults
        extract_mode = default_extract_mode
        extract_limit = default_extract_limit
        extract_llkmin = default_extract_llkmin
        extract_step = default_extract_step
    else:
        # the assert above guarantees exactly 4 values here
        (extract_mode, extract_limit,
         extract_llkmin, extract_step) = argv['-pdf']

    def gen():
        # one job per run directory
        for rootname in rootnames:
            yield Job(rootname,
                      extract_mode,
                      extract_limit,
                      extract_llkmin,
                      extract_step,
                      verbose,
                      percentiles=[.16, .5, .84],
                      mapkwargs=mapkwargs)

    # drain the asynchronous map; the workers write their own output
    with MapAsync(_extract_function, gen(), **mapkwargs) as ma:
        for _ in ma:
            pass
Code example #5 (score: 0)
    def __call__(self, M, verbose=True):
        """Evaluate the forward problem g(m) at every grid node in parallel.

        :param M: flat model vector, reshaped to (nz, ny, nx)
        :param verbose: if True, show a waitbar while the jobs complete
        :return: the concatenated encoded data vector (length Nobs.sum())
        """
        nx, ny, nz = self.nx, self.ny, self.nz
        Nobs = self.Nobs

        model_cube = M.reshape((nz, ny, nx))
        theorys = self.theorys

        def job_generator():
            # order matters!!!! nodes are numbered row-major: nnode = iy * nx + jx
            for iy in range(ny):
                for jx in range(nx):
                    yield Job(iy * nx + jx, theorys[iy, jx],
                              model_cube[:, iy, jx])

        def job_handler(nnode, theory, m):
            # evaluate the node's theory on its model column
            return nnode, theory(m=m)

        wb = waitbarpipe('g(m)') if verbose else None

        Data = np.zeros(Nobs.sum(), float)
        with MapAsync(job_handler, job_generator(), **self.mapkwargs) as ma:

            for _jobid, (nnode, data), _, _ in ma:
                # slice of the concatenated data vector owned by this node
                ib = Nobs[:nnode].sum()
                Data[ib: ib + Nobs[nnode]] = data

                if verbose:
                    wb.refresh(nnode / float(nx * ny))

        if verbose:
            wb.close()

        return Data  # warning : Data means encoded data
Code example #6 (score: 0)
    def sparse(self, trunc=2.0, Nworkers=40):
        """Assemble a sparse (CSC) version of the operator in parallel.

        :param trunc: truncation threshold forwarded to self.sparse_row
        :param Nworkers: number of parallel workers
            (generalized: was hard-coded to 40 inside the method)
        :return: scipy.sparse.csc_matrix holding the truncated entries
        """

        def job_generator():
            # one job per matrix row
            for i in range(self.shape[0]):
                yield Job(i)

        def job_handler(i):
            # returns the (rows, cols, vals) triplet of row i
            return self.sparse_row(i, trunc=trunc)

        rows = []
        cols = []
        vals = []
        with MapAsync(job_handler, job_generator(), Nworkers=Nworkers) as ma:
            for _, (_rows, _cols, _vals), _, _ in ma:
                rows.append(_rows)
                cols.append(_cols)
                vals.append(_vals)

        # concatenate the per-row triplets, then build the matrix in one shot
        rows = np.hstack(rows)
        cols = np.hstack(cols)
        vals = np.hstack(vals)

        return sp.csc_matrix((vals, (rows, cols)))
Code example #7 (score: 0)
File: run.py  Project: inthaliba/srfpython
def run(argv, verbose, mapkwargs):
    """Run Metropolis-Hastings chains for each HerrMet run directory.

    :param argv: parsed command-line options (dict); 'main' holds rootnames,
        '-mode' one of 'restart'/'append'/'skip', '-nchain' and '-nkeep' ints
    :param verbose: verbosity flag forwarded to workers and RunFile
    :param mapkwargs: kwargs forwarded to MapAsync
    """

    for k in argv.keys():
        if k in ['main', "_keyorder"]:
            continue  # private keys

        if k not in authorized_keys:
            raise Exception('option %s is not recognized' % k)

    rootnames = argv['main']
    if rootnames == []:
        rootnames = glob.glob(default_rootnames)
    assert len(rootnames)

    runmode = argv['-mode'][0] if "-mode" in argv.keys() else default_mode
    assert runmode in ['restart', 'append', 'skip']
    Nchain = int(
        argv['-nchain'][0]) if "-nchain" in argv.keys() else default_nchain
    Nkeep = int(
        argv['-nkeep'][0]) if "-nkeep" in argv.keys() else default_nkeep

    # ------------------------
    def gen(rootnames, runmode):
        # yields one Job per (rootname, chain)

        for rootname in rootnames:
            targetfile = "%s/_HerrMet.target" % rootname
            paramfile = "%s/_HerrMet.param" % rootname
            runfile = "%s/_HerrMet.run" % rootname

            # BUGFIX: resolve the effective mode per rootname; the original
            # reassigned `runmode` itself, so an 'append'->'restart' fallback
            # triggered by one rootname leaked into every following rootname
            # (deleting run files that should have been appended to)
            mode = runmode
            if mode == "append" and not os.path.exists(runfile):
                mode = "restart"
            elif mode == "restart" and os.path.exists(runfile):
                os.remove(runfile)
            elif mode == "skip" and os.path.exists(runfile):
                print("skip %s" % rootname)
                continue
            print(rootname)
            # ------
            p, logRHOM = load_paramfile(paramfile)
            # ------
            d = makedatacoder(
                targetfile,
                which=Datacoder_log)  # datacoder based on observations
            dobs, CDinv = d.target()
            duncs = CDinv**-.5
            ND = len(dobs)
            # prior data bounds used by the truncated gaussian likelihood
            dinfs = d(0.1 * np.ones_like(d.values))
            dsups = d(3.8 * np.ones_like(d.values))
            logRHOD = LogGaussND(dobs,
                                 duncs,
                                 dinfs,
                                 dsups,
                                 k=1000.,
                                 nanbehavior=1)
            # ------
            G = Theory(parameterizer=p, datacoder=d)
            # ---------------------------------
            if mode == "restart" or mode == "skip":
                # fresh run file: drop old content, reset the schema
                with RunFile(runfile, create=True, verbose=verbose) as rundb:
                    rundb.drop()
                    rundb.reset(p.NLAYER, d.waves, d.types, d.modes, d.freqs)
            elif mode == "append":
                pass
            else:
                raise Exception('unexpected runmode %s' % mode)

            # ---------------------------------
            for chainid in range(Nchain):
                # random starting model drawn uniformly in the prior box
                M0 = np.random.rand(len(p.MINF)) * (p.MSUP - p.MINF) + p.MINF
                MSTD = p.MSTD
                yield Job(runfile=runfile,
                          rootname=rootname,
                          chainid=chainid,
                          M0=M0,
                          MSTD=MSTD,
                          G=G,
                          ND=ND,
                          logRHOD=logRHOD,
                          logRHOM=logRHOM,
                          p=p,
                          d=d,
                          nkeep=Nkeep,
                          verbose=verbose)

    # ---------------------------------
    def fun(worker, rootname, runfile, chainid, M0, MSTD, G, ND, logRHOD,
            logRHOM, p, d, nkeep, verbose):
        # worker-side: run one Metropolis chain and filter out nan samples

        models, datas, weights, llks = metropolis(
            M0,
            MSTD,
            G,
            ND,
            logRHOD,
            logRHOM,
            nkeep=nkeep,
            normallaw=worker.randn,
            unilaw=worker.rand,
            chainid=chainid,
            HL=10,
            IK0=0.25,
            MPMIN=1.e-6,
            MPMAX=1e6,
            adjustspeed=0.3,
            nofail=True,
            debug=False,
            verbose=verbose,
            head="%10s " % rootname.split('_HerrMet_')[-1])

        # keep only samples whose data vector is not entirely nan
        I = np.any(~np.isnan(datas), axis=1)
        models, datas, weights, llks = models[I, :], datas[
            I, :], weights[I], llks[I]

        return runfile, models, datas, weights, llks, p, d

    # ---------------------------------
    with MapAsync(fun, gen(rootnames, runmode), **mapkwargs) as ma:
        for jobid, answer, _, _ in ma:
            runfile, models, datas, weights, llks, p, d = answer
            if verbose:
                print('=> write to %s' % runfile)
            with RunFile(runfile, verbose=False) as rundb:
                rundb.begintransaction()
                try:
                    rundb.insert("METROPOLIS", models, datas, weights, llks, p,
                                 d)
                    rundb.commit()
                except Exception:
                    # BUGFIX: was a bare `except:`; rollback(crash=True)
                    # presumably re-raises after rolling back -- TODO confirm
                    rundb.rollback(crash=True)
Code example #8 (score: 0)
def neldermead(argv, verbose, mapkwargs):
    """Refine the best Metropolis models with a Nelder-Mead optimization.

    NOTE: the leading `raise` marks this command as not ready; the body
    below is currently unreachable but is kept syntactically valid.

    :param argv: parsed command-line options (dict)
    :param verbose: verbosity flag
    :param mapkwargs: kwargs forwarded to MapAsync
    """
    raise Exception('not ready')
    for k in argv.keys():
        if k in ['main', "_keyorder"]:
            continue  # private keys

        if k not in authorized_keys:
            raise Exception('option %s is not recognized' % k)

    rootnames = argv['main']
    if rootnames == []:
        rootnames = glob.glob(default_rootnames)
    assert len(rootnames)
    for rootname in rootnames:
        runfile = "%s/_HerrMet.run" % rootname
        assert os.path.exists(runfile)

    assert argv["-top"] == [] or len(argv["-top"]) == 3  # unexpected argument number
    if argv["-top"] == []:
        top_llkmin, top_limit, top_step = default_top_llkmin, default_top_limit, default_top_step
    elif len(argv['-top']) == 3:
        top_llkmin, top_limit, top_step = argv['-top']
    # BUGFIX: was a Python-2 print statement (syntax error under Python 3,
    # which the sibling functions in this file target)
    print("top : llkmin %f, limit %d, step %d" % (top_llkmin, top_limit, top_step))

    # ------------------------
    def gen(rootnames):
        # yields one Job per selected model in each run directory

        for rootname in rootnames:
            targetfile = "%s/_HerrMet.target" % rootname
            paramfile = "%s/_HerrMet.param" % rootname
            runfile = "%s/_HerrMet.run" % rootname

            # ------
            p, logRHOM = load_paramfile(paramfile)
            # ------
            d = makedatacoder(targetfile, which=Datacoder_log)  # datacoder based on observations
            dobs, CDinv = d.target()
            duncs = CDinv ** -.5
            ND = len(dobs)
            dinfs = d(0.1 * np.ones_like(d.values))
            dsups = d(3.5 * np.ones_like(d.values))
            logRHOD = LogGaussND(dobs, duncs, dinfs, dsups, k=1000., nanbehavior=1)
            # ------
            G = Theory(parameterizer=p, datacoder=d)
            # ---------------------------------
            with RunFile(runfile, verbose=verbose) as rundb:
                best = list(rundb.get(llkmin=top_llkmin, limit=top_limit, step=top_step, algo=None))

            # ---------------------------------
            for modelid, chainid, weight, llk, nlayer, model, dat in best:
                M0 = p(*model)
                DM = 1.0  # p.MSTD

                yield Job(runfile=runfile,
                          rootname=rootname,
                          chainid=chainid,
                          M0=M0,
                          DM=DM,
                          G=G,
                          ND=ND,
                          logRHOD=logRHOD,
                          logRHOM=logRHOM,
                          p=p, d=d,
                          verbose=verbose)

    def fun(runfile, rootname, chainid, M0, DM, G, ND, logRHOD, logRHOM, p, d, verbose):
        # worker-side: run one Nelder-Mead descent and filter the samples
        models, datas, llks = neldermead_function(M0, DM, G, ND, logRHOD, logRHOM,
                                                  alpha=1.0,
                                                  beta=0.9,
                                                  gamma=1.2,
                                                  maxiter=1000,
                                                  interrupt=1e-12,
                                                  debug=1)

        weights = np.ones_like(llks)
        # drop samples whose data vector is entirely nan
        I = np.any(~np.isnan(datas), axis=1)
        models, datas, weights, llks = models[I, :], datas[I, :], weights[I], llks[I]

        # keep one sample per distinct likelihood value
        I = argunique(llks)
        models, datas, weights, llks = models[I, :], datas[I, :], weights[I], llks[I]

        return runfile, models, datas, weights, llks, p, d

    with MapAsync(fun, gen(rootnames), **mapkwargs) as ma:
        for jobid, answer, _, _ in ma:
            runfile, models, datas, weights, llks, p, d = answer
            if verbose:
                # BUGFIX: was a Python-2 print statement
                print('=> write to %s' % runfile)
            with RunFile(runfile, verbose=False) as rundb:
                rundb.begintransaction()
                try:
                    rundb.insert("NELDERMEAD", models, datas, weights, llks, p, d)
                    rundb.commit()
                except Exception:
                    # BUGFIX: was a bare `except:`; rollback(crash=True)
                    # presumably re-raises after rolling back -- TODO confirm
                    rundb.rollback(crash=True)

            # display the refined models and their dispersion curves
            rd = DepthDispDisplay(targetfile=runfile.replace('.run', '.target'))
            for model, data, llk in zip(models, datas, llks):
                rd.plotmodel(color="k", alpha=0.2, showvp=True, showvs=True, showrh=True,
                             showpr=True, *p.inv(model))

                rd.plotdisp(d.waves, d.types, d.modes, d.freqs, d.inv(data), dvalues=None, color="k", alpha=0.2)
            showme()
            plt.close('all')