Code example #1
0
            fpar.write("task='%s'\n" % task)
            fpar.write("set ylabel '%s'\n" % label)
            fpar.write("file='%s_set'\n" % task)
            fpar.write('xr=%f\n' % (maxtime))
            f.close()
            fpar.close()

    ftasks1d.close()
    ftasks2d.close()

    return


if __name__ == "__main__":

    import pathlib
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tools.parallel import Sync

    args = docopt(__doc__)
    output_path = pathlib.Path(args['--output']).absolute()

    # Only the root rank creates the output directory; the Sync() context
    # presumably holds the other MPI ranks until it exists — confirm against
    # dedalus.tools.parallel.
    with Sync() as sync:
        if sync.comm.rank == 0 and not output_path.exists():
            output_path.mkdir()

    # Apply `main` to every write in the given analysis files.
    post.visit_writes(args['<files>'], main, output=output_path)
Code example #2
0
File: build_datacube.py  Project: bpbrown/dstar
def read_data(files):
    """Read azimuthally averaged tasks ('<q>') from dedalus analysis files
    and gather them onto rank 0.

    Parameters
    ----------
    files : iterable
        File specifications forwarded to post.visit_writes.

    Returns
    -------
    tuple
        (global_data_set, global_time, theta, r); the gathered arrays are
        populated on rank 0 and are None on the other ranks.
    """
    start_time = time.time()

    # visit_writes may present the same file in several (start, count)
    # chunks; record each file exactly once, at its first chunk.
    def accumulate_files(filename, start, count, file_list):
        if start == 0:
            file_list.append(filename)

    file_list = []
    post.visit_writes(files, accumulate_files, file_list=file_list)
    logger.debug(file_list)

    data = {}
    times = None
    r = None
    theta = None

    # Split the world communicator: ranks that received files (color=1) read
    # real data; file-less ranks carry placeholder zeros so the collectives
    # below still match up.
    color = int(len(file_list) > 0)
    comm = MPI.COMM_WORLD.Split(color=color)
    rank = comm.rank
    size = comm.size
    if color:
        for file in file_list:
            logger.debug("opening file: {}".format(file))
            f = h5py.File(file, 'r')
            # (time, phi=0, theta, r): drop the azimuthal axis.
            data_slices = (slice(None), 0, slice(None), slice(None))
            for task in f['tasks']:
                if task[0] == '<' and task[
                        -1] == '>':  #azavgs denoted with '<q>'
                    logger.info("task: {}".format(task))
                    if task in data:
                        data[task] = np.append(data[task],
                                               f['tasks'][task][data_slices],
                                               axis=0)
                    else:
                        data[task] = np.array(f['tasks'][task][data_slices])
                    if r is None or theta is None:
                        # Coordinate grids come from the HDF5 dimension scales.
                        theta = f['tasks'][task].dims[2][0][:]
                        r = f['tasks'][task].dims[3][0][:]
            if times is None:
                times = f['scales/sim_time'][:]
            else:
                times = np.append(times, f['scales/sim_time'][:])
            f.close()
    else:
        data = {'zero': np.zeros((1, 1, 1))}
        times = np.zeros(1)

    comm.Barrier()

    n_global_time = np.array(0)
    n_global_data = np.array(0)

    data_set = data
    global_data_set = {}

    # Total number of time samples across all ranks.
    comm.Reduce([np.array(times.size), MPI.INT], [n_global_time, MPI.INT],
                op=MPI.SUM,
                root=0)
    if rank == 0:
        # BUG FIX: np.int was a deprecated alias removed in NumPy 1.24;
        # use the builtin int (same dtype).
        n_times_each = np.empty([size], dtype=int)
        global_time = np.empty([n_global_time], dtype=np.float64)
    else:
        n_times_each = None
        global_time = None
    comm.Gather(np.array(times.size), n_times_each, root=0)
    if rank == 0:
        # Variable-length gather: per-rank element counts and offsets into
        # the receive buffer.
        send_counts = tuple(n_times_each)
        displacements = tuple(
            np.append(np.zeros(1, dtype=int),
                      np.cumsum(n_times_each))[0:-1])
    else:
        send_counts = None
        displacements = None
    comm.Gatherv(times.astype(np.float64),
                 [global_time, send_counts, displacements, MPI.DOUBLE],
                 root=0)

    # Gather each task's data onto rank 0 the same way the times were.
    for task in data_set:
        data = data_set[task]
        comm.Reduce([np.array(data.size), MPI.INT], [n_global_data, MPI.INT],
                    op=MPI.SUM,
                    root=0)
        if rank == 0:
            logger.info("{}: n_time = {}, n_data = {}".format(
                task, n_global_time, n_global_data))
            n_data_each = np.empty([size], dtype=int)
            global_data = np.empty([
                n_global_time,
            ] + list(data[0, :, :].shape),
                                   dtype=np.float64)
            logger.debug("{}, {}, {}".format(data.shape, global_time.shape,
                                             global_data.shape))
        else:
            n_data_each = None
            global_data = None

        comm.Gather(np.array(data.size), n_data_each, root=0)

        if rank == 0:
            displacements = tuple(
                np.append(np.zeros(1, dtype=int),
                          np.cumsum(n_data_each))[0:-1])
            send_counts = tuple(n_data_each)
        comm.Gatherv(data.astype(np.float64),
                     [global_data, send_counts, displacements, MPI.DOUBLE],
                     root=0)
        global_data_set[task] = global_data
    end_time = time.time()
    logger.info("time to build dataset {:g}sec".format(end_time - start_time))
    return global_data_set, global_time, theta, r
Code example #3
0
    # Create output directory if needed
    with Sync() as sync:
        if sync.comm.rank == 0:
            if not output_path.exists():
                output_path.mkdir()
    logger.info("output to {}".format(output_path))

    fields = args['--fields'].split(',')

    def accumulate_files(filename, start, count, file_list):
        # Callback for post.visit_writes: echo each visit for debugging and
        # record a file only once — at the chunk whose write offset is 0.
        print(filename, start, count)
        if start == 0:
            file_list.append(filename)

    file_list = []
    post.visit_writes(args['<files>'], accumulate_files, file_list=file_list)
    logger.info(file_list)
    if len(file_list) > 0:
        for file in file_list:
            f = h5py.File(file, 'r')
            r = np.array(f['scales/r/1.5'])
            theta = np.array(f['scales/theta/1.5'])
            phi = np.array(f['scales/phi/1.5'])
            t = np.array(f['scales/sim_time'])
            pcm_list = []
            fig_list = []
            for field in fields:
                if field == 's_fluc':
                    cmap = 'RdYlBu_r'
                else:
                    cmap = None
Code example #4
0
File: plot_2d_series.py  Project: DenSto/Personal
                plot_tools.plot_bot(dset,(1,2),(slice(None),slice(None)))
            # Add time title
            title = title_func(file['scales/sim_time'][index])
            title_height = 1 - 0.5 * mfig.margin.top / mfig.fig.y
            fig.suptitle(title, x=0.48, y=title_height, ha='left')
            # Save figure
            savename = savename_func(file['scales/write_number'][index])
            savepath = output.joinpath(savename)
            fig.savefig(str(savepath), dpi=dpi)
            fig.clear()
    plt.close(fig)

if __name__ == "__main__":

    import pathlib
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tools.parallel import Sync

    # Parse CLI arguments from the module docstring (docopt convention).
    args = docopt(__doc__)

    output_path = pathlib.Path(args['--output']).absolute()
    # Create output directory if needed (rank 0 only; Sync presumably
    # coordinates the other MPI ranks — confirm in dedalus.tools.parallel).
    with Sync() as sync:
        if sync.comm.rank == 0:
            if not output_path.exists():
                output_path.mkdir()
    # Apply `main` to every write in the given analysis files.
    post.visit_writes(args['<files>'], main, output=output_path)
Code example #5
0
    # Slice coordinate: fall back to 7.5 when --slice is absent or invalid.
    # BUG FIX: the original used a bare `except:` (swallows everything,
    # including KeyboardInterrupt) and a redundant `pass`.
    try:
        slicer = float(args['--slice'])
    except (TypeError, ValueError):
        # --slice was None or not parseable as a float.
        slicer = 7.5
    x_axis = str(args['--x_axis'])
    y_axis = str(args['--y_axis'])
    plot_axes = []
    for s in [x_axis, y_axis]:
        if s == 'z':
            plot_axes.append(1)
        elif s == 'theta':
            plot_axes.append(2)
        elif s == 'r':
            plot_axes.append(3)
        else:
            # NOTE(review): despite the message, execution continues here;
            # a bad axis simply yields fewer plot axes.
            print(
                'Bad axis basis specified. Options are z, theta, r. Exiting...'
            )
    # Keep the two plotted axes whole; pin the remaining axis at `slicer`.
    run_slices = []
    for i in range(3):
        if i + 1 in plot_axes:
            run_slices.append(slice(None))
        else:
            run_slices.append(slicer)

    # BUG FIX: the original passed args['<tasks>'] positionally *after* the
    # keyword argument `output=`, which is a SyntaxError; pass it as a
    # keyword so visit_writes forwards it to `main`.
    post.visit_writes(args['<prefix>'],
                      main,
                      output=output_path,
                      tasks=args['<tasks>'])
Code example #6
0
            axes.axis('off')
            # Timestamp title
            if title:
                axes.set_title('%.2f' % file['scales']['sim_time'][index],
                               fontsize='large')
            # Save frame
            if save:
                # Frame filename comes from the write number stored in the file.
                savename = 'graph_%06i.png' % file['scales/write_number'][index]
                savepath = output.joinpath(savename)
                fig.savefig(str(savepath), dpi=dpi)
            # Clear the axes so the next write reuses the same figure.
            axes.cla()


if __name__ == "__main__":

    import pathlib
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tools.parallel import Sync

    args = docopt(__doc__)
    output_path = pathlib.Path(args['--output']).absolute()

    # Root rank makes the output directory; the Sync() context presumably
    # keeps the remaining ranks in step until it is in place.
    with Sync() as sync:
        if sync.comm.rank == 0 and not output_path.exists():
            output_path.mkdir()

    # Render every write in the given files with plot_writes.
    post.visit_writes(args['<files>'], plot_writes, output=output_path)
Code example #7
0
            except ValueError:
                # suffix_num not in PLOT_IDXS, continue
                continue

            fig.savefig(str(savepath) + '_labeled', dpi=dpi)
            fig.clear()
    plt.close(fig)


if __name__ == "__main__":

    import pathlib
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tools.parallel import Sync

    args = docopt(__doc__)

    output_path = pathlib.Path(args['--output']).absolute()
    write_start = int(args['--write'])

    # Create the output directory on rank 0 only; Sync() presumably
    # coordinates the other MPI ranks.
    with Sync() as sync:
        if sync.comm.rank == 0 and not output_path.exists():
            output_path.mkdir()

    post.visit_writes(args['<files>'],
                      main,
                      output=output_path,
                      write_start=write_start)
Code example #8
0
            # Save figure
            savename = savename_func(file['scales/write_number'][index])
            savepath = output.joinpath(savename)
            fig.savefig(str(savepath), dpi=dpi)
            axes.clear()
    plt.close(fig)


if __name__ == "__main__":

    # BUG FIX: pathlib is used below (pathlib.Path) but was never imported
    # in this block, unlike the sibling entry points; add the missing import.
    import pathlib
    from docopt import docopt
    from dedalus.tools import logging
    from dedalus.tools import post
    from dedalus.tools.parallel import Sync

    args = docopt(__doc__)

    output_path = pathlib.Path(args['--output']).absolute()
    slice_type = args['--slice-type']
    # Create output directory if needed
    with Sync() as sync:
        if sync.comm.rank == 0:
            if not output_path.exists():
                output_path.mkdir()
    print("slice_type = {}".format(slice_type))
    # Apply `main` to every write, forwarding the requested slice type.
    post.visit_writes(args['<files>'],
                      main,
                      output=output_path,
                      slice_type=slice_type)
Code example #9
0
# Resolve the output directory: honor --output when given, otherwise
# default to a directory named after the case.
if args['--output'] is not None:
    output_path = pathlib.Path(args['--output']).absolute()
else:
    data_dir = case + '/'
    output_path = pathlib.Path(data_dir).absolute()

# Task names to read from the analysis files (z-profiles and fluxes).
fields = ['s(z)', 'F_h(z)', 'F_κ(z)', 'F_KE(z)', 'F_PE(z)', 'Q_source(z)']


def accumulate_files(filename, start, count, file_list):
    """Record *filename* in *file_list* the first time it is visited.

    post.visit_writes may hand over several (start, count) chunks per file;
    only the chunk beginning at write 0 triggers the append, so each file
    appears exactly once.
    """
    is_first_chunk = (start == 0)
    if is_first_chunk:
        file_list.append(filename)


file_list = []
post.visit_writes(files, accumulate_files, file_list=file_list)
logger.debug(file_list)

data = {}
z = None
times = None
for file in file_list:
    logger.debug("opening file: {}".format(file))
    f = h5py.File(file, 'r')
    #data_slices = (slice(None), 0, 0, slice(None))
    data_slices = (slice(None), 0, slice(None))
    for task in f['tasks']:
        if '(z)' in task:  # fluxes denoted with 'f(z)'
            logger.info("task: {}".format(task))
            if task in data:
                data[task] = np.append(data[task],
Code example #10
0
    climits = args['--climits']
    xlimits = args['--xlimits']
    ylimits = args['--ylimits']
    dns_tidx = int(args['--dns_tidx'])
    ce2_tidx = int(args['--ce2_tidx'])
    field = args['--field']

    # Parse "a,b" limit strings into integer tuples.
    climits = tuple([int(i) for i in climits.split(',')])
    xlimits = tuple([int(i) for i in xlimits.split(',')])
    ylimits = tuple([int(i) for i in ylimits.split(',')])

    with Sync() as sync:
        if sync.comm.rank == 0:
            # Create output directory if needed
            output_path = pathlib.Path(args['--output']).absolute()

            if not output_path.exists():
                output_path.mkdir()

    # BUG FIX: dns_tidx/ce2_tidx were parsed from the CLI above but the
    # original call hard-coded -1 for both, silently ignoring the user's
    # choice; forward the parsed values instead.
    post.visit_writes(args['<ce2_file_path>'],
                      plot_power_spectra,
                      field=field,
                      climits=climits,
                      xlimits=xlimits,
                      ylimits=ylimits,
                      dns_tidx=dns_tidx,
                      ce2_tidx=ce2_tidx,
                      label=label,
                      dns_filename=args['<dns_file_path>'],
                      output=args['--output'])
Code example #11
0
            # Power spectrum of the Temperature slice at y=+1: real FFT
            # along axis 1, then |c|^2, plotted on a log10 scale.
            temp = file['tasks/Temperature(y=+1)'][index,:,:,0]
            c_temp = np.fft.rfft(temp,axis=1)
            power = (c_temp * c_temp.conj()).real
            log_power = np.log10(power)

            vmin = np.nanmin(log_power)
            vmax = np.nanmax(log_power)
            # Zero modes give log10(0) = -inf; clamp to 16 decades below vmax.
            if vmin == -np.inf:
                vmin = vmax-16
            print("vmin, vmax = {},{}".format(vmin,vmax))
            img.set_data(log_power)
            cbar.mappable.set_clim(vmin=vmin,vmax=vmax)
            cbar.set_ticks([vmin,vmax])
            # Put the colorbar ticks/label above its axes.
            cax.xaxis.tick_top()
            cax.xaxis.set_label_position('top')

            # Filename comes from the write number recorded in the file.
            savename = savename_func(file['scales/write_number'][index])
            savepath = output.joinpath(savename)
            fig.savefig(str(savepath), dpi=dpi)

# Parse CLI arguments and resolve the destination directory.
args = docopt(__doc__)
output_path = pathlib.Path(args['--output']).absolute()

# Rank 0 creates the directory; Sync() presumably keeps the other MPI
# ranks in step until it exists.
with Sync() as sync:
    if sync.comm.rank == 0 and not output_path.exists():
        output_path.mkdir()

# Run calc_front over every write in the given files.
post.visit_writes(args['<files>'], calc_front, output=output_path)