Example #1
    def load_dataset(self, o_dir, v_n):

        # flist = glob(dir + "/{}.file_*.h5".format(v_n))

        if v_n in self.name_conversion_map:
            v_n_ = self.name_conversion_map[v_n]
        else:
            v_n_ = v_n

        def get_number(file_):
            return int(str(file_.split('.file_')[-1]).split('.h5')[0])

        fname = v_n_ + '.file_*.h5'
        files = locate(fname,
                       root=self.gen_set['indir'] + o_dir + '/data/',
                       followlinks=True)
        files = sorted(files, key=get_number)
        if len(files) == 0:
            raise ValueError(
                "For '{}' in {} found NO files \n searched:{}".format(
                    v_n_, o_dir, fname))

        print("\t loading '{}' ({} with {} files) ".format(
            v_n, o_dir, len(files)))

        # carefully creating dataset, as some .h5 might be corrupted
        try:
            dset = h5.dataset(files)
        except IOError:
            cleared_files = []
            for file_ in files:
                try:
                    tmp = h5py.File(file_, "r")
                    tmp.close()
                except IOError:
                    Printcolor.red("Error! Corrupted file: {}".format(
                        file_.split(self.sim)[-1]))
                    break  # drop this file and everything after it
                cleared_files.append(file_)
            dset = h5.dataset(cleared_files)

        if v_n_ not in list(dset.contents.keys())[0]:
            raise NameError(
                "Loaded dataset ({}) does not contain required v_n:{}".format(
                    list(dset.contents.keys())[0], v_n_))

        self.dataset_matrix[self.i_output(o_dir)][self.i_v_n(
            v_n)] = copy.deepcopy(dset)
        dset.close_files()
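
A note on the recovery path above: the try/except exists because a single corrupted .h5 file makes h5.dataset fail, so each file is probed with h5py and everything from the first corrupted file onward is dropped. A standalone sketch of that probe, assuming only h5py (the helper name is illustrative):

import h5py

def filter_readable(files):
    # Keep the prefix of `files` that opens cleanly; stop at the first
    # corrupted file, mirroring the `break` in the loop above.
    cleared = []
    for path in files:
        try:
            with h5py.File(path, "r"):
                pass
        except IOError:
            print("Corrupted file: {}".format(path))
            break
        cleared.append(path)
    return cleared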
Example #2
def load_one_dset_to_get_iter(time_, key, inpath, output):

    files = FileWork.get_filelist(key, inpath, output)
    # files = get_filelist(key, output_dir)

    dset = h5.dataset(files[0])  # fastest way

    dataset_iterations = dset.iterations
    dataset_times = []
    for it in dataset_iterations:
        dataset_times.append(float(dset.get_time(it)))

    print("\t Iterations {}".format(dataset_iterations))
    print("\t Times    "),
    print([("{:.3f}, ".format(i_time)) for i_time in dataset_times])
    # print(' ')

    # selecting the iteration whose time is closest to the required one;
    # 0.004925794970773136 ms = G*M_sun/c^3 is the geometric time unit,
    # so time_ [s] is first converted to code units (M_sun)
    idx = find_nearest_index(np.array(dataset_times),
                             time_ / (0.004925794970773136 * 1e-3))
    iteration = dataset_iterations[idx]
    closest_time = dataset_times[idx]

    print("\t it:{} with time:{:.3f} is the closest to required time:{:.3f}".
          format(iteration, closest_time * 0.004925794970773136 * 1e-3, time_))

    return iteration, closest_time * 0.004925794970773136 * 1e-3
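
The hard-coded factor 0.004925794970773136 is the geometric time unit G*M_sun/c^3 expressed in milliseconds (1 M_sun of time is about 4.926 microseconds), so time_ given in seconds is converted to code units before the nearest-index search and converted back on return. A quick sanity check from the CGS constants of Example #5 (agreement to about four significant figures; the exact value depends on the adopted G and M_sun):

uc = 2.99792458e10   # speed of light [cm/s]
uG = 6.67428e-8      # gravitational constant [cgs]
uMs = 1.9884e33      # solar mass [g]
print(uG * uMs / uc**3 * 1000)  # ~0.0049255 ms per M_sun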
Example #3
    def load_dataset(self, o_dir, plane, v_n):
        fname = v_n + '.' + plane + '.h5'
        files = locate(fname, root=self.gen_set['indir'] + o_dir + '/', followlinks=False)
        print("\t Loading: {} plane:{} v_n:{} dataset ({} files)"
              .format(o_dir, plane, v_n, len(files)))
        if len(files) > 1:
            raise ValueError("More than 1 file ({}) found. \nFile:{} location:{}"
                             "\nFiles: {}"
                             .format(len(files), fname, self.gen_set['indir'] + o_dir + '/', files))
        if len(files) == 0:
            raise IOError("NO files found for {}. \nlocation:{}"
                          .format(fname, self.gen_set['indir'] + o_dir + '/'))
        dset = h5.dataset(files)
        # grid = dset.get_grid(iteration=it)
        # print("grid.origin: {}".format(grid.origin))
        # print("grid.dim   : {}".format(grid.dim))
        # print("grid.coordinates(): {}".format([ [np.array(coord).min(), np.array(coord).max()] for coord in grid.coordinates()]))
        # print("grid.levels: {}".format([level for level in grid.levels]))
        # print("grid.extent: {}".format(grid.extent))

        # exit(1)
        # print("\t loading it:{} plane:{} v_n:{} dset:{}"
        #       .format(o_dir, plane, v_n, dset))
        dset.get_grid().mesh()  # result discarded; presumably called to build/cache the mesh
        # dset.get_grid_data()
        self.dataset_matrix[self.i_output(o_dir)][self.i_plane(plane)][self.i_v_n(v_n)] = dset
Example #4
    def get_grid_for_it(self, key):

        files = FileWork.get_filelist(key, self.inpath, self.output)
        # files = self.get_filelist(key)

        # create a scidata dataset out of those files
        print("\t Parsing the metadata..."),
        start_t = time.time()
        dset = h5.dataset(files)
        if self.it not in dset.iterations:
            raise ValueError(
                "Required it: {} is not in dset.iterations() {}".format(
                    self.it, dset.iterations))
        print("done! (%.2f sec)" % (time.time() - start_t))

        # Get the grid
        print("\t Reading the grid..."),
        start_t = time.time()
        grid = dset.get_grid(iteration=self.it)
        print("done! (%.2f sec)" % (time.time() - start_t))

        return dset, grid
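
A hypothetical call site for the method above; `extractor` stands in for an instance that carries the it, inpath, and output attributes the method reads, and "rho" is an illustrative key:

dset, grid = extractor.get_grid_for_it("rho")
print("iterations: {}".format(dset.iterations))
print("refinement levels: {}".format(len(grid)))  # iterated over in Example #7
dset.close_files()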
Example #5
import os

from pylab import *
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import scidata.carpet.hdf5 as hdf5  # assumed source of the `hdf5.dataset` used below

home = os.environ["HOME"]
nullfmt = NullFormatter()  # no labels


def get_data(df, it, rl):
    grid = df.get_reflevel(iteration=it, reflevel=rl)
    x, y = grid.mesh()
    dat = df.get_reflevel_data(grid, iteration=it)
    return x, y, dat


filename = home + "/simulations/test_dx0.1_sph3/output-0000/myeostest/alp.yz.h5"
print("Opening dataset " + str(filename))
datafilealp = hdf5.dataset(filename)
print("Completed opening dataset")

filename = home + "/simulations/test_dx0.1_sph3/output-0000/myeostest/rho.yz.h5"
print("Opening dataset " + str(filename))
datafilerho = hdf5.dataset(filename)
print("Completed opening dataset")

# define the colormap
cmap = [plt.cm.gist_earth, plt.cm.afmhot]

# units
uc = 2.99792458 * 10**(10)
uG = 6.67428 * 10**(-8)
uMs = 1.9884 * 10**(33)
utime = uG * uMs / uc**3 * 1000
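
With these constants, utime converts a time in geometric code units (multiples of M_sun) to milliseconds, matching the factor hard-coded in Example #2. For instance:

t_code = 2048.0        # time in units of M_sun
t_ms = t_code * utime  # ~10.09 ms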
Example #6
    def __init__(self,
                 it,
                 output,
                 inpath,
                 outpath,
                 def_nu_v_n="thc_M0_abs_energy",
                 overwrite=False):

        self.it = it
        self.output = output
        self.inpath = inpath
        self.outpath = outpath
        self.description = None
        self.overwrite = overwrite

        outfname = self.outpath + str(self.it) + "nu.h5"
        if (not os.path.isfile(outfname)) or \
                (os.path.isfile(outfname) and self.overwrite):

            # get reflevel for future use
            default_dset = h5.dataset(
                FileWork.get_filelist(def_nu_v_n, self.inpath, self.output))

            reflevel = default_dset.get_reflevel()
            nrad = reflevel.n[0]
            ntheta = int(round(sqrt(float(reflevel.n[1] / 2))))
            nphi = 2 * ntheta
            if ntheta * nphi != reflevel.n[1]:
                raise ValueError("The leakage grid is inconsistent")

            for key, val in Names.nu_dattar.items():
                print("\tProcessing key'{}' val:'{}'".format(key, val))
                files = FileWork.get_filelist(key, self.inpath, self.output)
                assert len(files)
                dset = h5.dataset(files)
                data = dset.get_reflevel_data(reflevel=reflevel,
                                              iteration=int(self.it),
                                              variable=val,
                                              timelevel=0,
                                              dtype=np.float32)
                # print(data)
                # output
                fname = self.outpath + str(self.it) + '_' + key + ".h5"
                dfile = h5py.File(fname, "w")
                # dfile.attrs.create("delta", reflevel.delta)
                # dfile.attrs.create("extent", reflevel.extent())
                dfile.attrs.create("iteration", self.it)
                dfile.attrs.create("time", default_dset.get_time(self.it))
                dfile.attrs.create("nrad", nrad)
                dfile.attrs.create("ntheta", ntheta)
                dfile.attrs.create("nphi", nphi)
                print(data.shape)

                # print('delta: {}'.format(reflevel.delta))
                # print('extent:{}'.format(reflevel.extent()))
                # print('iteration:{}'.format(self.it))
                # print('time:{}'.format(dset.get_time(self.it)))
                # print('nrad:{}'.format(nrad))
                # print('ntheta:{}'.format(ntheta))
                # print('nphi:{}'.format(nphi))
                # exit(1)

                dfile.create_dataset(key, data=data)

                dset.close_files()
                dfile.close()
                print("\tFinished key'{}' val:'{}'".format(key, val))
            # print("done! (%.2f sec)" % (time.time() - start_t))
            it_time = default_dset.get_time(self.it)  # capture before closing the files
            default_dset.close_files()

            # load extracted data and save as one file:
            all_in_names = Names.nu_dattar
            dfile = h5py.File(outfname, "w")
            for key, val in all_in_names.items():
                print("\tLoading and appending {}".format(key))
                dfile__ = h5py.File(self.outpath + str(self.it) + '_' + key +
                                    ".h5", "r")
                data = np.array(dfile__[key])
                dfile__.close()
                if key in Names.out:
                    key = Names.out[key]
                dfile.create_dataset(key, data=data, dtype=np.float32)
            # file-level attributes only need to be written once
            dfile.attrs.create("iteration", self.it)
            dfile.attrs.create("time", it_time)
            dfile.attrs.create("nrad", nrad)
            dfile.attrs.create("ntheta", ntheta)
            dfile.attrs.create("nphi", nphi)
            dfile.close()
            print("\tDONE")
        else:
            print("File: {} already exists. Skipping.".format(outfname))
Example #7
    def process_datasets_for_it(self, key, val):

        files = FileWork.get_filelist(key, self.inpath, self.output)
        # files = self.get_filelist(key)

        print("\t Parsing the metadata..."),
        start_t = time.time()
        dset = h5.dataset(files)
        print("done! (%.2f sec)" % (time.time() - start_t))

        if self.it not in dset.iterations:
            raise ValueError(
                "it: {} is missing in dset for v_n: {}\n{}".format(
                    self.it, key, dset.iterations))

        # saving data for iteration
        outfname = self.outpath + str(self.it) + '_' + key + ".h5"
        dfile = h5py.File(outfname, "w")

        if self.description is not None:
            dfile.create_dataset("description",
                                 data=np.string_(self.description))

        print("\t Saving {}...".format(outfname)),
        for rl in range(len(self.grid)):
            gname = "reflevel=%d" % rl
            dfile.create_group(gname)
            dfile[gname].attrs.create("delta", self.grid[rl].delta)
            dfile[gname].attrs.create("extent", self.grid[rl].extent())
            dfile[gname].attrs.create("iteration", self.it)
            dfile[gname].attrs.create("reflevel", rl)
            dfile[gname].attrs.create("time", dset.get_time(self.it))

            # found = False
            # for entry in dset.contents.keys():
            #     print("\tNot found {} in {}".format(val, entry.split()))
            #     if val in entry.split() \
            #             and "it={}".format(self.it) in entry.split() \
            #             and 'c=0' in entry.split():
            #         found = True
            #         print("\tFound {} -> {}".format(val, entry))
            #         break

            # if found == False:
            #     raise KeyError("Check for found failed.")
            # self.grid[rl]
            # print("\t\tdset.contents : {}".format(dset.iterations))
            try:
                data = dset.get_reflevel_data(self.grid[rl],
                                              iteration=int(self.it),
                                              variable=val,
                                              timelevel=0,
                                              dtype=np.float32)
            except KeyError:
                raise KeyError("Failed to extract data from {} file \n"
                               "Data: rl: {} it: {} v_n: {}\n"
                               "".format(files[0], rl, self.it, val))
            dfile[gname].create_dataset(key, data=data)
        dfile.close()
        print("done! (%.2f sec)" % (time.time() - start_t))
        dset.close_files()
        gc.collect()
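
The method above fixes the layout of its output file: one "reflevel=N" group per refinement level, each carrying delta, extent, iteration, reflevel, and time attributes plus a dataset named after key. A minimal read-back sketch under that assumption (the file name and "rho" key are illustrative):

import h5py

with h5py.File("123456_rho.h5", "r") as f:
    for gname in (k for k in f.keys() if k.startswith("reflevel=")):
        grp = f[gname]
        print("{}: time={} delta={} shape={}".format(
            gname, grp.attrs["time"], grp.attrs["delta"], grp["rho"].shape))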
Example #8
import os

import matplotlib.patches as patches
import scidata.carpet.hdf5 as hdf5  # assumed source of the `hdf5.dataset` used below

home = os.environ["HOME"]


def get_data(df, it, rl):
    grid = df.get_reflevel(iteration=it, reflevel=rl)
    x, y = grid.mesh()
    dat = df.get_reflevel_data(grid, iteration=it)
    return x, y, dat


namesim = home + "/simulations/test_sphl1m0_pert0.5_0/output-0000/myeostest"
name = "/vel[0]"
filename = namesim + name + ".yz.h5"
print "Opening dataset " + str(filename)
datafilevx = hdf5.dataset(filename)
print "Completed opening dataset"

name = "/vel[1]"
filename = namesim + name + ".yz.h5"
print "Opening dataset " + str(filename)
datafilevy = hdf5.dataset(filename)
print "Completed opening dataset"

name = "/vel[2]"
filename = namesim + name + ".yz.h5"
print "Opening dataset " + str(filename)
datafilevz = hdf5.dataset(filename)
print "Completed opening dataset"

name = "/rho"
Example #9
import os
import re
import sys
import tempfile

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse
import scidata.carpet.hdf5 as h5  # assumed source of `h5.dataset` below

# `bh`, `locate`, and `get_velphi` are project-specific helpers assumed
# to be importable in the original module.


def plot(plane, ref):
    # Create plane directory if non-existing
    if not os.path.exists(plane):
        os.mkdir(plane)

    # Find the velocity files
    ret = os.getcwd()
    os.chdir('../../')
    d_dir = os.getcwd()
    xlist = locate('vel[0].{}.h5'.format(plane))
    #print(xlist)
    ylist = locate('vel[1].{}.h5'.format(plane))
    #print(ylist)
    os.chdir(ret)

    # Read the metadata
    xset = h5.dataset(xlist)
    yset = h5.dataset(ylist)

    reflevels = list(set(xset.select_reflevels(iteration=xset.iterations[0])))
    if ref=='all':
        pass
    else:
        reflevels = [r for r in ref if r in reflevels]

    sys.stderr.write("\nvelphi {}-plane\n".format(plane))

    # Find the max and min velocities of the finest reflevel
    sys.stderr.write("Finding the min and max ... ")
    vamax = []
    vamin = []
    for it in xset.iterations:
        va = get_velphi(xset, yset, it, 6)  # reflevel 6: the finest level here
        vamax.append(np.max(va))
        vamin.append(np.min(va))
    vamax = max(vamax)
    vamin = min(vamin)
    sys.stderr.write("done!\n")

    # Get the apparent horizons
    hax = {"xy": 2, "xz": 1, "yz": 0}[plane]
    horizons = bh.BHHorizons(root=d_dir).horizons

    for reflevel in reflevels:
        sys.stderr.write("reflevel {}: ".format(reflevel))
        pdir = "{0}/r{1}".format(plane,reflevel)
        # Make directory for reflevel
        if not os.path.exists(pdir):
            os.mkdir(pdir)

        # Set axis limits from the coordinate ranges of the mesh
        axis = [0.0] * 4
        pgrid = [None for i in range(len(xset.iterations))]
        for i in range(len(xset.iterations)):
            grid = xset.get_reflevel(iteration=xset.iterations[i],
                                     reflevel=reflevel)
            pgrid[i] = grid.mesh()
        axis[0] = np.min(pgrid[0][0])  # range of the first in-plane coordinate
        axis[1] = np.max(pgrid[0][0])
        axis[2] = np.min(pgrid[0][1])  # range of the second in-plane coordinate
        axis[3] = np.max(pgrid[0][1])

        # Plot each iteration
        sys.stderr.write("Plotting ... ")
        for i in range(len(xset.iterations)):
            it = xset.iterations[i]
            va = get_velphi(xset, yset, it, reflevel)
            dgrid = xset.get_reflevel(iteration=it, reflevel=reflevel).mesh()

            # Plot
            plt.figure()
            ax = plt.axes()
            plot = plt.pcolormesh(dgrid[0], dgrid[1], va, cmap='jet',
                                  vmin=vamin, vmax=vamax)
            cbar = plt.colorbar(plot)
            cbar.set_label(r'$\omega$')
            plt.clim(vamin, vamax)

            # Plot the horizon
            if it in horizons.keys():
                horizon = horizons[it]
                hslice = horizon.slice(s=0., axis=hax)
                art = Ellipse(xy=hslice.center, width=hslice.diam[1],
                        height=hslice.diam[0], edgecolor='black',
                        facecolor='black', alpha=1.0)
                ax.add_artist(art)

            if plane=='xy':
                plt.xlabel(r'x [M$_{\odot}$]')
                plt.ylabel(r'y [M$_{\odot}$]')
            elif plane=="xz":
                plt.xlabel(r'x [M$_{\odot}$]')
                plt.ylabel(r'z [M$_{\odot}$]')
            else:
                plt.xlabel(r'y [M$_{\odot}$]')
                plt.ylabel(r'z [M$_{\odot}$]')
            ax.axis(axis)
            ax.set_aspect('equal', 'datalim')

            t = xset.get_dataset(iteration=it).attrs["time"]
            plt.title("time = %10.2f" %t)

            plt.savefig("{0}/{1}.png".format(pdir, it))
            plt.close()  # release the figure; one is created per iteration

        # Make movie
        sys.stderr.write("making movie ... ")

        os.chdir(pdir)
        ldir   = tempfile.mkdtemp()
        fnames = os.listdir('./')
        fnames = [f for f in fnames if re.match(r".*\.png$", f) is not None]
        fnames = sorted(fnames)

        for i in range(len(fnames)):
            os.symlink(os.getcwd() + "/" + fnames[i],
                    ldir + "/%05d.png" % i)
        os.system("ffmpeg -y -i {}/%05d.png -vcodec mpeg4 -qscale 1 movie.mp4"
            " &> /dev/null".format(ldir))
        for i in range(len(fnames)):
            os.unlink(ldir + "/%05d.png" % i)
        os.rmdir(ldir)
        os.chdir(ret)

        sys.stderr.write("done!\n")