Example #1
def gas_alpha(sim=False, t_range=[0, -1], OVERWRITE=False):
    """Calculate the global alpha value from the time series object via
        alpha = -(2./3.) * ts.uxuym / (ts.rhom * ts.csm**2)
    averaged over the given time range of the time series.

    Args:
        sim             simulation object; if False, use the simulation in the
                        current working directory
        t_range         time range [t_min, t_max] of the time series to use;
                        t_max = -1 means up to the last entry
        OVERWRITE       overwrite the existing alpha_<t_range> file in sim.pc_datadir

    Returns:
        dictionary with alpha, alpha_mean, alpha_stddev and the corresponding
        mean-flux-subtracted quantities (alpha_mmf, ...)
    """

    from pencil import get_sim
    from pencil.sim import sim as sim_module  # alias so the 'sim' argument is not shadowed
    from pencil import io
    import numpy as np

    def empirical_std_deviation(x):
        """Estimated standard deviation of the measurements x around the
        (unknown) true value:

        s(x) = SQRT( 1./(M-1) * SUM( (x-<x>)**2 ) )"""
        x = np.array(x)
        M = np.size(x)
        xm = np.mean(x)

        # equivalent to np.sqrt(1./(M-1.)*np.sum((x-xm)**2))
        return np.sqrt(M / (M - 1.) * ((1. / M * np.sum(x**2)) - xm**2))

    def std_deviation_of_mean_value(x):
        """Standard deviation (measurement uncertainty) of the arithmetic mean
        value <x>:

        s(<x>) = s(x)/SQRT( M ) = SQRT( 1./(M-1) * SUM( (x-<x>)**2 ) ) / SQRT( M )"""
        x = np.array(x)
        M = np.size(x)

        if M == 1: return 0

        return empirical_std_deviation(x) / np.sqrt(M)

    if isinstance(sim, sim_module.__Simulation__):
        SIM = sim
    else:
        SIM = get_sim()

    t_range = list(t_range)  # work on a copy so the mutable default argument is never modified
    filename = 'alpha_' + str(t_range[0]) + '_' + str(t_range[1])

    ## skip if nothing is new
    if not OVERWRITE and io.exists(name=filename, sim=SIM):
        print('~ Alpha for "' + SIM.name + '" already exists. Loading file...')
        return io.load(name=filename, sim=SIM)

    print('~ Calculating alpha for "' + SIM.name + '" in "' + SIM.path + '"')

    ## import the time series object
    try:
        print('~ reading time series..')
        ts = SIM.get_ts()
    except Exception:
        print("! ERROR: Couldn't read time series!")
        return False

    ## fall back to csm = 1 if the csm quantity is not exported correctly
    try:
        csm = ts.csm
        if csm[0] == 0: csm = 1.
    except AttributeError:
        print("? WARNING: Couldn't find >csm< in time series, setting csm = 1. "
              "This may be incorrect!")
        csm = 1.

    if t_range[1] == -1: t_range[1] = ts.t[-1]
    id_min = np.argmin(np.abs(ts.t - t_range[0]))
    id_max = np.argmin(np.abs(ts.t - t_range[1]))

    alpha_dict = {}

    ## calculate alpha
    print('~ calculating alpha, its mean value and standard deviation for ' +
          str(t_range))
    alpha_dict['alpha'] = -(2. / 3.) * (ts.uxuym) / (ts.rhom * csm**2)
    alpha_dict['alpha_mean'] = np.mean(alpha_dict['alpha'][id_min:id_max])
    alpha_dict['alpha_stddev'] = std_deviation_of_mean_value(
        alpha_dict['alpha'][id_min:id_max])
    print('~ alpha_mean = ' + str(alpha_dict['alpha_mean']) +
          ' and alpha_stddev = ' + str(alpha_dict['alpha_stddev']))

    ## calculate alpha minus mean_flux
    print(
        '~ calculating alpha minus mean_flux (alpha_mmf), its mean value and standard deviation'
    )
    alpha_dict['alpha_mmf'] = -(2. / 3.) * (ts.uxuym - ts.uxm * ts.uym) / (
        ts.rhom * csm**2)
    alpha_dict['alpha_mmf_mean'] = np.mean(
        alpha_dict['alpha_mmf'][id_min:id_max])
    alpha_dict['alpha_mmf_stddev'] = std_deviation_of_mean_value(
        alpha_dict['alpha_mmf'][id_min:id_max])
    print('~ alpha_mmf_mean = ' + str(alpha_dict['alpha_mmf_mean']) +
          ' and alpha_mmf_stddev = ' + str(alpha_dict['alpha_mmf_stddev']))

    import math
    for v in alpha_dict.values():
        if isinstance(v, float) and math.isnan(v):
            io.debug_breakpoint()

    ## save the alpha dict to SIM.pc_datadir
    print('~ saving alpha values in ' + SIM.pc_datadir + '/' + filename)
    io.save(alpha_dict, filename, folder=SIM.pc_datadir)

    return alpha_dict
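
A minimal usage sketch for gas_alpha (assumptions: it is run from inside a Pencil Code run directory so that get_sim() can resolve the simulation, and the time series contains uxuym, rhom and csm; the time range below is an arbitrary placeholder):

if __name__ == '__main__':
    from pencil import get_sim

    sim = get_sim()                                # simulation in the current working directory
    alpha = gas_alpha(sim=sim, t_range=[10, 100])  # average over t in [10, 100] (placeholder range)
    if alpha:                                      # gas_alpha returns False if the time series could not be read
        print('alpha_mean =', alpha['alpha_mean'], '+/-', alpha['alpha_stddev'])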
Example #2
def dispersion_and_drift(sim=False,
                         OVERWRITE=False,
                         GLOBAL=True,
                         LOCAL=True,
                         use_IDL=False,
                         recalculate_gas_velo_at_particle_pos=False):
    """This calculates the dispersion (sigma) and drift (zeta) locally and globally
    by using the gas_velo_at_particle_pos script and dataset for all particles.

    With sigma = sqrt( 1/N_par * sum_i^N_par( (v_par(i) - <v_par>)^2 ) ) and
    zeta = sqrt( 1/N_par * sum_i^N_par( (v_par(i) - u(xp_i))^2 ) )

    Arg:
	  OVERWRITE:		set to True to overwrite already calculated results
      GLOBAL:         Calculate drift and dispersion globally, i.e. whole simulation domain
      LOCAL:          Calculate drift and dispersion locally, i.e. grid cell wise
      recalculate_gas_velo_at_particle_pos:
                    if the dataset shall be recalcualted
      use_IDL:  use backup solution of IDL script and sav files

      returns True if successfull
    """

    from pencil import get_sim
    from pencil import io
    from pencil import read
    from pencil.diag.particle import gas_velo_at_particle_pos
    from scipy.io import readsav
    from os import listdir
    from os.path import exists, join, dirname
    import numpy as np

    if not sim:
        sim = get_sim()
        if not sim:
            print('! ERROR: Specify simulation object!')
            return False
    SIM = sim

    ## make sure the gas_velo_at_particle_pos dataset exists (recalculate if requested)
    gas_velo_at_particle_pos(OVERWRITE=recalculate_gas_velo_at_particle_pos,
                             use_IDL=use_IDL,
                             sim=sim)

    print(
        '\n##################### Starting the whole calculation process of DISPERSION and DRIFT for '
        + SIM.name + ' #####################')

    ## default and setup
    GASVELO_DESTINATION = 'gas_velo_at_particle_pos'
    GASVELO_DIR = join(SIM.pc_datadir, GASVELO_DESTINATION)

    ## get list of available files
    if use_IDL:
        file_filetype = '.sav'
    else:
        file_filetype = '.pkl'
    files = []
    if exists(GASVELO_DIR):
        files = [
            i for i in listdir(GASVELO_DIR)
            if i.startswith(GASVELO_DESTINATION) and i.endswith(file_filetype)
        ]
    if files == []:
        print(
            '!! ERROR: No calc_gas_speed_at_particle_position files found for '
            + SIM.name + '! Use the IDL script to produce them first!')
        return False
    if use_IDL:
        USE_PKL_FILES = False
        files = [i.split('_')[-1].split(file_filetype)[0] for i in files]
        scheme = ''
    else:
        USE_PKL_FILES = True
        files = [i.split('_')[-1].split(file_filetype)[0] for i in files]
        scheme = '_tsc'

    ## calculate global dispersion for all snapshots for which gas_velo_at_particle_pos files are found
    for file_no in files:
        print(
            '## Starting the calculation of DISPERSION and DRIFT for  ### VAR'
            + str(file_no) + ' ###')

        # check if files already exist
        if (not OVERWRITE) and io.exists(
                'sigma_' + file_no, folder=SIM.pc_datadir) and io.exists(
                    'zeta_' + file_no, folder=SIM.pc_datadir) and io.exists(
                        'sigma_l_' + file_no,
                        folder=join(SIM.pc_datadir, 'sigma_l')) and io.exists(
                            'zeta_l_' + file_no,
                            folder=join(SIM.pc_datadir, 'zeta_l')):
            print('## Skipping calculations')
            continue

        ## read sav and var file
        print('## reading gas_velo_at_particle_pos file and VAR')
        if USE_PKL_FILES:
            sav_file = io.load(
                join(GASVELO_DIR, GASVELO_DESTINATION + scheme + '_' +
                     file_no + '.pkl'))[GASVELO_DESTINATION]
        else:
            sav_file = readsav(
                join(GASVELO_DIR, GASVELO_DESTINATION + scheme + '_' +
                     file_no + '.sav'))[GASVELO_DESTINATION]
        var_file = read.var(varfile='VAR' + file_no,
                            quiet=True,
                            trimall=True,
                            datadir=SIM.datadir)

        ## get everything ready
        dim = SIM.dim
        pdim = read.pdim(sim=SIM)
        npar = pdim.npar
        npar1 = 1. / npar

        ## get the first quantities and set up the DATA_SET record array
        if use_IDL:
            time = sav_file['time'][0]
            DATA_SET = np.rec.fromarrays(
                [
                    range(0, npar), sav_file['par_idx'][0][0].astype('int'),
                    sav_file['par_idx'][0][1].astype('int'),
                    sav_file['par_idx'][0][2].astype('int'),
                    sav_file['par_pos'][0][0], sav_file['par_pos'][0][1],
                    sav_file['par_pos'][0][2], sav_file['par_velo'][0][0],
                    sav_file['par_velo'][0][1], sav_file['par_velo'][0][2],
                    sav_file['npar'][0], sav_file['gas_velo'][0][0],
                    sav_file['gas_velo'][0][1], sav_file['gas_velo'][0][2],
                    var_file.rho[sav_file['par_idx'][0][2].astype('int'),
                                 sav_file['par_idx'][0][1].astype('int'),
                                 sav_file['par_idx'][0][0].astype('int')],
                    var_file.rhop[sav_file['par_idx'][0][2].astype('int'),
                                  sav_file['par_idx'][0][1].astype('int'),
                                  sav_file['par_idx'][0][0].astype('int')]
                ],
                names=
                'parid, idx, idy, idz, posx, posy, posz, vx, vy, vz, npar, gasv_x, gasv_y, gasv_z, rho, rhop',
                formats=
                'int, int,int,int, float,float,float, float,float,float, int, float,float,float, float, float'
            )
        else:
            time = sav_file['time']
            DATA_SET = np.rec.fromarrays(
                [
                    range(0, npar), sav_file['par_idx'][0].astype('int'),
                    sav_file['par_idx'][1].astype('int'),
                    sav_file['par_idx'][2].astype('int'),
                    sav_file['par_pos'][0], sav_file['par_pos'][1],
                    sav_file['par_pos'][2], sav_file['par_velo'][0],
                    sav_file['par_velo'][1], sav_file['par_velo'][2],
                    sav_file['npar'], sav_file['gas_velo'][0],
                    sav_file['gas_velo'][1], sav_file['gas_velo'][2],
                    var_file.rho[sav_file['par_idx'][2].astype('int'),
                                 sav_file['par_idx'][1].astype('int'),
                                 sav_file['par_idx'][0].astype('int')],
                    var_file.rhop[sav_file['par_idx'][2].astype('int'),
                                  sav_file['par_idx'][1].astype('int'),
                                  sav_file['par_idx'][0].astype('int')]
                ],
                names=
                'parid, idx, idy, idz, posx, posy, posz, vx, vy, vz, npar, gasv_x, gasv_y, gasv_z, rho, rhop',
                formats=
                'int, int,int,int, float,float,float, float,float,float, int, float,float,float, float, float'
            )

        DATA_SET = np.sort(DATA_SET, order=['idx', 'idy', 'idz'])

        # calculate GLOBAL DISPERSION in x, y and z direction, also the absolute magnitude
        if GLOBAL:
            print(
                '## Calculating GLOBAL DISPERSION values in x,y,z direction and abs value'
            )
            mean_vx = np.mean(DATA_SET['vx'])
            mean_vy = np.mean(DATA_SET['vy'])
            mean_vz = np.mean(DATA_SET['vz'])
            SIGMA = {
                'SIGMA_o_x':
                np.sqrt(npar1 * np.sum((DATA_SET['vx'] - mean_vx)**2)),
                'SIGMA_o_y':
                np.sqrt(npar1 * np.sum((DATA_SET['vy'] - mean_vy)**2)),
                'SIGMA_o_z':
                np.sqrt(npar1 * np.sum((DATA_SET['vz'] - mean_vz)**2)),
                'SIGMA_o':
                np.sqrt(npar1 * np.sum((DATA_SET['vx'] - mean_vx)**2 +
                                       (DATA_SET['vy'] - mean_vy)**2 +
                                       (DATA_SET['vz'] - mean_vz)**2))
            }

            # calculate GLOBAL DRIFT in x, y and z direction, also the absolute magnitude
            print(
                '## Calculating GLOBAL DRIFT values in x,y,z direction and abs value'
            )
            ZETA = {
                'ZETA_o_x':
                np.sqrt(npar1 * np.sum(
                    (DATA_SET['vx'] - DATA_SET['gasv_x'])**2)),
                'ZETA_o_y':
                np.sqrt(npar1 * np.sum(
                    (DATA_SET['vy'] - DATA_SET['gasv_y'])**2)),
                'ZETA_o_z':
                np.sqrt(npar1 * np.sum(
                    (DATA_SET['vz'] - DATA_SET['gasv_z'])**2)),
                'ZETA_o':
                np.sqrt(npar1 *
                        np.sum((DATA_SET['vx'] - DATA_SET['gasv_x'])**2 +
                               (DATA_SET['vy'] - DATA_SET['gasv_y'])**2 +
                               (DATA_SET['vz'] - DATA_SET['gasv_z'])**2))
            }

            print('## saving calculated GLOBAL DISPERSION and DRIFT')
            io.save(SIGMA, 'sigma_' + file_no, folder=SIM.pc_datadir)
            io.save(ZETA, 'zeta_' + file_no, folder=SIM.pc_datadir)

        # calculate LOCAL DISPERSION and DRIFT
        if LOCAL:
            print('## Calculating LOCAL DISPERSION and DRIFT values')
            tmp_id = [
                DATA_SET[0]['idx'], DATA_SET[0]['idy'], DATA_SET[0]['idz']
            ]
            sigma_l = np.zeros((dim.nx, dim.ny, dim.nz))
            zeta_l = np.zeros((dim.nx, dim.ny, dim.nz))
            particles_l = []
            for particle in DATA_SET:
                if [particle['idx'], particle['idy'],
                        particle['idz']] == tmp_id:
                    particles_l.append(particle)
                else:
                    np_l = np.size(particles_l)
                    if np_l != 0:
                        np1_l = 1. / np_l
                    mean_vx_l = 0
                    mean_vy_l = 0
                    mean_vz_l = 0
                    sum_s_l = 0
                    sum_z_l = 0

                    for entry in particles_l:
                        mean_vx_l = mean_vx_l + np1_l * entry['vx']
                        mean_vy_l = mean_vy_l + np1_l * entry['vy']
                        mean_vz_l = mean_vz_l + np1_l * entry['vz']

                    for entry in particles_l:
                        sum_s_l = sum_s_l + (entry['vx'] - mean_vx_l)**2 + (
                            entry['vy'] - mean_vy_l)**2 + (entry['vz'] -
                                                           mean_vz_l)**2
                        sum_z_l = sum_z_l + (
                            entry['vx'] - entry['gasv_x'])**2 + (
                                entry['vy'] - entry['gasv_y'])**2 + (
                                    entry['vz'] - entry['gasv_z'])**2

                    sigma_l[tmp_id[0], tmp_id[1], tmp_id[2]] = np.sqrt(np1_l * sum_s_l)
                    zeta_l[tmp_id[0], tmp_id[1], tmp_id[2]] = np.sqrt(np1_l * sum_z_l)

                    # reset the local variables and lists for the next grid cell (i.e. towards the newest particle)
                    tmp_id = [
                        particle['idx'], particle['idy'], particle['idz']
                    ]
                    particles_l = []
                    particles_l.append(particle)

            # do the local calculations one last time for the last grid cell
            np_l = np.size(particles_l)
            np1_l = 1. / np_l
            mean_vx_l = 0
            mean_vy_l = 0
            mean_vz_l = 0
            sum_s_l = 0
            sum_z_l = 0

            for entry in particles_l:
                mean_vx_l = mean_vx_l + np1_l * entry['vx']
                mean_vy_l = mean_vy_l + np1_l * entry['vy']
                mean_vz_l = mean_vz_l + np1_l * entry['vz']

            for entry in particles_l:
                sum_s_l = sum_s_l + (entry['vx'] - mean_vx_l)**2 + (
                    entry['vy'] - mean_vy_l)**2 + (entry['vz'] - mean_vz_l)**2
                sum_z_l = sum_z_l + (entry['vx'] - entry['gasv_x'])**2 + (
                    entry['vy'] - entry['gasv_y'])**2 + (entry['vz'] -
                                                         entry['gasv_z'])**2

            sigma_l[tmp_id[0], tmp_id[1], tmp_id[2]] = np.sqrt(np1_l * sum_s_l)
            zeta_l[tmp_id[0], tmp_id[1], tmp_id[2]] = np.sqrt(np1_l * sum_z_l)

            # save the local sigma and zeta fields to SIM.pc_datadir
            print('## saving calculated LOCAL DISPERSION and DRIFT')
            io.save(sigma_l,
                    'sigma_l_' + file_no,
                    folder=join(SIM.pc_datadir, 'sigma_l'))
            io.save(zeta_l,
                    'zeta_l_' + file_no,
                    folder=join(SIM.pc_datadir, 'zeta_l'))

        ## Please keep these lines as a reminder of how to add columns to a record array!
        # add columns to DATA_SET for the local zeta and sigma of each individual particle
        #DATA_SET = add_column_to_record_array(DATA_SET, 'zeta_l', zeta_l[DATA_SET['idx'],DATA_SET['idy'],DATA_SET['idz']], dtypes='float', usemask=False, asrecarray=True)
        #DATA_SET = add_column_to_record_array(DATA_SET, 'sigma_l', sigma_l[DATA_SET['idx'],DATA_SET['idy'],DATA_SET['idz']], dtypes='float', usemask=False, asrecarray=True)

    return True
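
A minimal usage sketch for dispersion_and_drift (assumptions: it is run from inside a Pencil Code run directory and the gas_velo_at_particle_pos dataset can be built for that run; the flags shown are just the defaults from above):

if __name__ == '__main__':
    from pencil import get_sim

    sim = get_sim()          # simulation in the current working directory
    ok = dispersion_and_drift(sim=sim, GLOBAL=True, LOCAL=True, use_IDL=False)
    if ok:
        print('sigma_* and zeta_* files were written to', sim.pc_datadir)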
Example #3
    def __init__(self,
                 direction='x',
                 trange=[0, -1],
                 sim='.',
                 OVERWRITE=False,
                 quiet=True,
                 jump_distance=0.5,
                 use_existing_pstalk_sav=False):

        import sys
        from os.path import join
        from os.path import exists as path_exists
        import numpy as np

        from pencil import get_sim
        from pencil.io import load, save, exists
        from pencil.read import pstalk as read_pstalk
        from pencil.math.derivatives import simple_centered as derivatives_centered
        from pencil.backpack import printProgressBar

        # allow a path string as the sim argument and resolve it to a simulation object
        if isinstance(sim, str):
            sim = get_sim(sim)

        # abort if no pstalk file is found (__init__ must not return a value other than None)
        if not path_exists(join(sim.datadir, 'proc0',
                                'particles_stalker.dat')):
            print('?? WARNING: No particles_stalker.dat found for simulation ' +
                  sim.name + '!! Skipping this run!')
            return

        # output location for the diffusion results
        out_path = join(sim.pc_datadir, 'particle', 'diffusion')
        out_name = direction + '_' + str(trange[0]) + '_' + str(trange[1])

        # if results already exist and OVERWRITE is not set, load them instead of recalculating
        if not OVERWRITE and exists(name=out_name, folder=out_path):
            self_tmp = load(out_name, folder=out_path)
            for key in [a for a in dir(self_tmp) if not a.startswith('__')]:
                setattr(self, key, getattr(self_tmp, key))

        else:
            #### start calculations ####
            print('##')
            print('## Calculating particle diffusion for "' + sim.name +
                  '" in "' + sim.path + '"')

            print('## reading particle stalker file..')
            pstalk = read_pstalk(
                sim=sim,
                use_existing_pstalk_sav=use_existing_pstalk_sav,
                tmin=trange[0],
                tmax=trange[1])
            grid = sim.grid
            dim = sim.dim

            # get time range as index for pstalk dataset
            argmin = np.abs(pstalk.t - trange[0]).argmin()
            if trange[1] < 0.:
                argmax = pstalk.t.argmax()
            else:
                argmax = np.abs(pstalk.t - trange[1]).argmin()
            time_range = pstalk.t[argmin:argmax + 1]

            # optionally shift the time range so that time_range[0] == 0 by subtracting the first time entry:
            #time_offset = pstalk.t[argmin:argmax][0]
            #time_range = pstalk.t[argmin:argmax]-time_offset

            print('\n## doing the calculation for ' + direction)
            L = getattr(grid, 'L' + direction)  # domain size in this direction
            N = getattr(dim, 'n' + direction)  # number of grid cells in this direction
            # position time series in this direction for all stalked particles
            pos_series = getattr(pstalk, direction + 'p').T[argmin:argmax + 1]
            N_dt = pos_series.shape[0]  # number of available time steps
            N_par = pos_series.shape[1]  # number of stalked particles

            # prepare the travel distance array and placeholders for mean, variance and sigma
            travel_distance = 0. * pos_series
            mean_travel_dist = np.array(0)
            variance = np.array(0)
            sigma = np.array(0)

            ## calculate travel distances for each particle, correcting for jumps across periodic boundaries
            pbar = False
            Nt = N_dt  # total number of time steps, used for the progress bar
            for i_t, pos in enumerate(pos_series):
                if i_t == 0: continue  # skip first time step
                pbar = printProgressBar(i_t, Nt, pbar=pbar)

                # calculate the distance dx made in dt for all particles at once
                dx = pos - pos_series[i_t - 1]
                travel_distance[i_t] = travel_distance[i_t - 1] + dx

                # a large positive dx means the particle wrapped backwards across the
                # lower boundary, so subtract L
                jumps = np.where(dx > jump_distance * L)
                travel_distance[i_t][jumps] = travel_distance[i_t][jumps] - L

                # a large negative dx means the particle wrapped forwards across the
                # upper boundary, so add L
                jumps = np.where(dx < -jump_distance * L)
                travel_distance[i_t][jumps] = travel_distance[i_t][jumps] + L

            # calculate mean, variance and sigma of the travel distance as functions of time
            mean_travel_dist = np.mean(travel_distance, axis=1)

            # 1. estimate variance as time series
            variance = np.var(travel_distance, axis=1)
            sigma = np.sqrt(variance)

            # 2. estimate the diffusion coefficient as half the time derivative of the variance (centered differences)
            diff_dvar = 0.5 * derivatives_centered(time_range, variance)
            diffusion_mean = np.mean(diff_dvar)
            diffusion_std = np.std(diff_dvar)

            # store the results on this object
            self.diffusion = diffusion_mean
            self.diffusion_error = diffusion_std
            self.travel_distance = travel_distance
            self.mean_travel_distance = mean_travel_dist
            self.timerange = time_range
            self.variance = variance
            self.sigma = sigma
            self.direction = direction

            print('~ diffusion = ' + str(self.diffusion))
            print('~ diffusion std = ' + str(self.diffusion_error))
            print('~ direction = ' + str(self.direction))

            try:
                print('## saving results in ' + join(out_path, out_name))
                save(obj=self, name=out_name, folder=out_path)
            except Exception:
                print("!! Unexpected error:", sys.exc_info()[0])
                print("!! Check if you have write permissions.")
                raise
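
A minimal usage sketch, assuming this __init__ belongs to a particle-diffusion class somewhere under pencil.diag.particle; the class name diffusion used below is a placeholder for whatever the surrounding class is actually called:

if __name__ == '__main__':
    from pencil import get_sim

    sim = get_sim()                                           # simulation in the current working directory
    diff = diffusion(direction='x', trange=[0, -1], sim=sim)  # 'diffusion' is a placeholder class name
    print('diffusion =', diff.diffusion, '+/-', diff.diffusion_error)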