Example #1
def _default_rcut(th):
    """
    Look for the first minimum in the partial g(r)
    """
    from atooms.system.particle import distinct_species
    from atooms.postprocessing.partial import Partial
    from atooms.postprocessing import RadialDistributionFunction
    from .helpers import ifabsmm

    ids = distinct_species(th[0].particle)
    gr = Partial(RadialDistributionFunction, ids, th, dr=0.1)
    gr.do(update=False)
    rcut = {}
    for isp in ids:
        for jsp in ids:
            # First find the absolute maximum
            _, m = ifabsmm(list(gr.partial[(isp, jsp)].grid),
                           list(gr.partial[(isp, jsp)].value))
            # Then look for the first minimum after the maximum
            # (stop one point short so that value[i+1] below stays in range)
            for i in range(len(gr.partial[(isp, jsp)].grid) - 1):
                if gr.partial[(isp, jsp)].grid[i] >= m[0]:
                    delta = gr.partial[(isp, jsp)].value[i+1] - gr.partial[(isp, jsp)].value[i]
                    if delta >= 0:
                        rcut[(isp, jsp)] = gr.partial[(isp, jsp)].grid[i]
                        break
    
    return rcut
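
A minimal usage sketch for the helper above; the trajectory file name is a placeholder, and opening it with TrajectoryXYZ follows Example #6 below:

from atooms.trajectory import TrajectoryXYZ

# Hypothetical usage: estimate per-pair cutoffs from an XYZ trajectory file
th = TrajectoryXYZ('trajectory.xyz')  # placeholder file name
rcut = _default_rcut(th)
# rcut maps species pairs such as ('A', 'B') to the position of the first
# minimum after the main peak of the corresponding partial g(r)
print(rcut)
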
Example #2
def gr(input_file,
       dr=0.04,
       grandcanonical=False,
       ndim=-1,
       rmax=-1.0,
       *input_files,
       **global_args):
    """Radial distribution function"""
    global_args = _compat(global_args)
    if global_args['legacy']:
        backend = pp.RadialDistributionFunctionLegacy
    else:
        backend = pp.RadialDistributionFunction

    for th in _get_trajectories([input_file] + list(input_files), global_args):
        th._grandcanonical = grandcanonical
        cf = backend(th,
                     dr=dr,
                     rmax=rmax,
                     norigins=global_args['norigins'],
                     ndim=ndim)
        if global_args['filter'] is not None:
            cf = pp.Filter(cf, global_args['filter'])
        cf.do(update=global_args['update'])

        ids = distinct_species(th[0].particle)
        if len(ids) > 1 and not global_args['no_partial']:
            cf = Partial(backend,
                         ids,
                         th,
                         dr=dr,
                         rmax=rmax,
                         norigins=global_args['norigins'],
                         ndim=ndim)
            cf.do(update=global_args['update'])
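
gr() above is written like a command-line entry point: it constructs the backend and calls do(). For use in a script, the per-pair data can be read back from the Partial object directly, as Examples #1 and #6 do. A minimal sketch, with a placeholder file name:

from atooms.trajectory import TrajectoryXYZ
from atooms.postprocessing import RadialDistributionFunction
from atooms.postprocessing.partial import Partial
from atooms.system.particle import distinct_species

# Hypothetical sketch: compute partial g(r) and inspect grid/value directly
th = TrajectoryXYZ('trajectory.xyz')  # placeholder file name
ids = distinct_species(th[0].particle)
cf = Partial(RadialDistributionFunction, ids, th, dr=0.04)
cf.do(update=False)
for pair in cf.partial:
    print(pair, cf.partial[pair].grid[:3], cf.partial[pair].value[:3])
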
Example #3
def qt(input_file,
       tmax=-1.0,
       tmax_fraction=0.75,
       tsamples=60,
       func='logx',
       *input_files,
       **global_args):
    """Collective overlap correlation function"""
    global_args = _compat(global_args)
    func = _func_db[func]
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, tmax, tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        pp.CollectiveOverlap(
            th, t_grid,
            norigins=global_args['norigins']).do(update=global_args['update'])
        ids = distinct_species(th[0].particle)
        if len(ids) > 1:
            Partial(pp.CollectiveOverlap,
                    ids,
                    th,
                    t_grid,
                    norigins=global_args['norigins']).do(
                        update=global_args['update'])
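
qt() and the time correlation functions in the following examples all build their time grid the same way: _func_db maps the func name ('logx' or 'linear') to a grid builder, the grid runs from one timestep up to tmax or tmax_fraction * total_time, and t=0 is prepended. A sketch of that pattern in isolation; the logx_grid import path is an assumption, while linear_grid is the same helper used for the k grids in Examples #7 and #9:

# Sketch of the t_grid construction shared by the time correlation functions.
# The logx_grid import path is an assumption of this sketch.
from atooms.postprocessing.helpers import linear_grid, logx_grid

timestep, total_time = 0.005, 1000.0      # made-up values for illustration
tsamples, tmax_fraction = 60, 0.75

# func='logx': roughly logarithmically spaced times, with t=0 prepended
t_grid_log = [0.0] + logx_grid(timestep, tmax_fraction * total_time, tsamples)

# func='linear': evenly spaced times over the same window (vacf, msd defaults)
t_grid_lin = [0.0] + linear_grid(timestep, tmax_fraction * total_time, tsamples)
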
Example #4
def alpha2(input_file,
           tmax=-1.0,
           tmax_fraction=0.75,
           tsamples=60,
           func='logx',
           *input_files,
           **global_args):
    """Non-Gaussian parameter"""
    global_args = _compat(global_args)
    func = _func_db[func]
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, tmax, tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        pp.NonGaussianParameter(
            th, t_grid,
            norigins=global_args['norigins']).do(update=global_args['update'])
        ids = distinct_species(th[0].particle)
        if len(ids) > 1:
            Partial(pp.NonGaussianParameter,
                    ids,
                    th,
                    t_grid,
                    norigins=global_args['norigins']).do(
                        update=global_args['update'])
Example #5
def vacf(input_file,
         tmax=-1.0,
         tmax_fraction=0.10,
         tsamples=30,
         func='linear',
         *input_files,
         **global_args):
    """Velocity autocorrelation function"""
    global_args = _compat(global_args)
    func = _func_db[func]
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, min(th.total_time, tmax),
                                  tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        pp.VelocityAutocorrelation(
            th, t_grid,
            norigins=global_args['norigins']).do(update=global_args['update'])
        ids = distinct_species(th[0].particle)
        if len(ids) > 1:
            Partial(pp.VelocityAutocorrelation,
                    ids,
                    th,
                    t_grid,
                    norigins=global_args['norigins']).do(
                        update=global_args['update'])
Example #6
    def test_gr_partial_2(self):
        from atooms.postprocessing.partial import Partial
        f = os.path.join(self.reference_path, 'kalj-small.xyz')
        ts = trajectory.TrajectoryXYZ(f)
        ref = {}
        ref[('A', 'A')] = numpy.array([0., 0.00675382, 0.27087136, 1.51486318])
        ref[('B', 'B')] = numpy.array(
            [0.31065645, 0.51329066, 0.67485665, 0.78039485])
        ref[('A', 'B')] = numpy.array(
            [4.25950671, 3.86572027, 2.70020052, 1.78935426])

        gr = Partial(postprocessing.RadialDistributionFunction, ['A', 'B'], ts)
        gr.compute()
        for ab in [('A', 'A'), ('A', 'B'), ('B', 'B')]:
            self.assertLess(deviation(gr.partial[ab].value[21:25], ref[ab]),
                            4e-2)
Example #7
def fskt(input_file,
         tmax=-1.0,
         tmax_fraction=0.75,
         tsamples=60,
         kmin=7.0,
         kmax=8.0,
         ksamples=1,
         dk=0.1,
         nk=8,
         kgrid=None,
         func='logx',
         total=False,
         fix_cm=False,
         lookup_mb=64.0,
         *input_files,
         **global_args):
    """Self intermediate scattering function"""
    global_args = _compat(global_args)
    func = _func_db[func]
    if global_args['legacy']:
        backend = pp.SelfIntermediateScatteringLegacy
    else:
        backend = pp.SelfIntermediateScattering
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, tmax, tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        if kgrid is not None:
            k_grid = [float(_) for _ in kgrid.split(',')]
        else:
            k_grid = linear_grid(kmin, kmax, ksamples)
        if total:
            backend(th,
                    k_grid,
                    t_grid,
                    nk,
                    dk=dk,
                    norigins=global_args['norigins'],
                    fix_cm=fix_cm,
                    lookup_mb=lookup_mb).do(update=global_args['update'])
        ids = distinct_species(th[0].particle)
        if len(ids) > 1:
            Partial(backend,
                    ids,
                    th,
                    k_grid,
                    t_grid,
                    nk,
                    dk=dk,
                    norigins=global_args['norigins'],
                    fix_cm=fix_cm,
                    lookup_mb=lookup_mb).do(update=global_args['update'])
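
For reference, a sketch of the computation fskt() triggers with total=True, written out directly. It assumes pp in these excerpts is atooms.postprocessing; the file name and the hand-made time grid are placeholders:

# Hypothetical sketch of the total self intermediate scattering function,
# mirroring the total=True branch above.
from atooms.trajectory import TrajectoryXYZ
from atooms.postprocessing import SelfIntermediateScattering
from atooms.postprocessing.helpers import linear_grid

th = TrajectoryXYZ('trajectory.xyz')           # placeholder file name
k_grid = linear_grid(7.0, 8.0, 1)              # same defaults as fskt() above
t_grid = [0.0, th.timestep, 10 * th.timestep]  # tiny illustrative time grid
cf = SelfIntermediateScattering(th, k_grid, t_grid, 8, dk=0.1)
cf.do(update=False)
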
Example #8
def sk(input_file,
       nk=20,
       dk=0.1,
       kmin=-1.0,
       kmax=15.0,
       ksamples=30,
       kgrid=None,
       weight=None,
       weight_trajectory=None,
       weight_fluctuations=False,
       *input_files,
       **global_args):
    """
    Structure factor
    """
    from atooms.trajectory import TrajectoryXYZ
    global_args = _compat(global_args)
    if global_args['fast']:
        backend = pp.StructureFactorFast
    else:
        backend = pp.StructureFactorLegacy
    if kgrid is not None:
        kgrid = [float(_) for _ in kgrid.split(',')]
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        cf = backend(th,
                     kgrid=kgrid,
                     norigins=global_args['norigins'],
                     kmin=kmin,
                     kmax=kmax,
                     nk=nk,
                     dk=dk,
                     ksamples=ksamples)
        if global_args['filter'] is not None:
            cf = pp.Filter(cf, global_args['filter'])
        if weight_trajectory is not None:
            weight_trajectory = TrajectoryXYZ(weight_trajectory)
        cf.add_weight(trajectory=weight_trajectory,
                      field=weight,
                      fluctuations=weight_fluctuations)
        cf.do(update=global_args['update'])

        ids = distinct_species(th[0].particle)
        if len(ids) > 1 and not global_args['no_partial']:
            cf = Partial(backend,
                         ids,
                         th,
                         kgrid=kgrid,
                         norigins=global_args['norigins'],
                         kmin=kmin,
                         kmax=kmax,
                         nk=nk,
                         dk=dk,
                         ksamples=ksamples)
            cf.add_weight(trajectory=weight_trajectory,
                          field=weight,
                          fluctuations=weight_fluctuations)
            cf.do(update=global_args['update'])
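
A sketch of the weighted variant, mirroring the add_weight() call above. It assumes pp here is atooms.postprocessing; the file names and the field name 'radius' are placeholders, and the choice of StructureFactorLegacy follows the non-fast branch:

# Hypothetical sketch: structure factor weighted by a per-particle field read
# from a second trajectory, as in the add_weight() call above.
from atooms.trajectory import TrajectoryXYZ
from atooms.postprocessing import StructureFactorLegacy

th = TrajectoryXYZ('trajectory.xyz')    # placeholder file name
wth = TrajectoryXYZ('weights.xyz')      # placeholder trajectory carrying the field
cf = StructureFactorLegacy(th, kmin=-1.0, kmax=15.0, nk=20, dk=0.1, ksamples=30)
cf.add_weight(trajectory=wth, field='radius', fluctuations=False)
cf.do(update=False)
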
Example #9
def fkt(input_file,
        tmax=-1.0,
        tmax_fraction=0.75,
        tsamples=60,
        kmin=7.0,
        kmax=7.0,
        ksamples=1,
        dk=0.1,
        nk=100,
        kgrid=None,
        func='logx',
        fix_cm=False,
        *input_files,
        **global_args):
    """Total intermediate scattering function"""
    global_args = _compat(global_args)
    func = _func_db[func]
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, tmax, tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        if kgrid is not None:
            k_grid = [float(_) for _ in kgrid.split(',')]
        else:
            k_grid = linear_grid(kmin, kmax, ksamples)
        ids = distinct_species(th[0].particle)
        if len(ids) > 1:
            Partial(pp.IntermediateScattering,
                    ids,
                    th,
                    k_grid,
                    t_grid,
                    norigins=global_args['norigins'],
                    nk=nk,
                    dk=dk,
                    fix_cm=fix_cm).do(update=global_args['update'])
Example #10
def chi4qs(input_file,
           tsamples=60,
           a=0.3,
           tmax=-1.0,
           func='logx',
           tmax_fraction=0.75,
           total=False,
           *input_files,
           **global_args):
    """Dynamic susceptibility of self overlap"""
    global_args = _compat(global_args)
    func = _func_db[func]
    if global_args['fast']:
        backend = pp.Chi4SelfOverlapOpti
    else:
        backend = pp.Chi4SelfOverlap

    for th in _get_trajectories([input_file] + list(input_files), global_args):
        if tmax > 0:
            t_grid = [0.0] + func(th.timestep, min(th.total_time, tmax),
                                  tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(th.timestep, tmax_fraction * th.total_time,
                                  tsamples)
        else:
            t_grid = None
        if total:
            backend(th, t_grid, a=a, norigins=global_args['norigins']).do(
                update=global_args['update'])
        ids = distinct_species(th[0].particle)
        if not total and len(ids) > 1:
            Partial(backend,
                    ids,
                    th,
                    t_grid,
                    a=a,
                    norigins=global_args['norigins']).do(
                        update=global_args['update'])
Example #11
def msd(input_file,
        tmax=-1.0,
        tmax_fraction=0.75,
        tsamples=30,
        sigma=1.0,
        func='linear',
        rmsd_max=-1.0,
        fix_cm=False,
        *input_files,
        **global_args):
    """Mean square displacement"""
    func = _func_db[func]
    global_args = _compat(global_args)
    for th in _get_trajectories([input_file] + list(input_files), global_args):
        dt = th.timestep
        if tmax > 0:
            t_grid = [0.0] + func(dt, min(th.total_time, tmax), tsamples)
        elif tmax_fraction > 0:
            t_grid = [0.0] + func(dt, tmax_fraction * th.total_time, tsamples)
        else:
            t_grid = None
        ids = distinct_species(th[0].particle)
        pp.MeanSquareDisplacement(
            th,
            tgrid=t_grid,
            norigins=global_args['norigins'],
            sigma=sigma,
            rmax=rmsd_max,
            fix_cm=fix_cm).do(update=global_args['update'])
        if len(ids) > 1:
            Partial(pp.MeanSquareDisplacement,
                    ids,
                    th,
                    tgrid=t_grid,
                    norigins=global_args['norigins'],
                    sigma=sigma,
                    rmax=rmsd_max).do(update=global_args['update'])
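
A sketch of computing the total MSD directly and inspecting the result in a script. It assumes pp here is atooms.postprocessing and that the correlation object exposes grid and value like the g(r) objects in Examples #1 and #6; the file name and time grid are placeholders:

# Hypothetical sketch: total mean square displacement on a hand-made time grid
from atooms.trajectory import TrajectoryXYZ
from atooms.postprocessing import MeanSquareDisplacement

th = TrajectoryXYZ('trajectory.xyz')   # placeholder file name
cf = MeanSquareDisplacement(th, tgrid=[0.0, 1.0, 10.0, 100.0])
cf.do(update=False)
for t, value in zip(cf.grid, cf.value):
    print(t, value)
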