Example No. 1
    def open(self, mode: str, *, remote_operation: bool = False):
        """open numpy file handle coded directories

        Parameters
        ----------
        mode : str
            one of `a` for `write-enabled` mode or `r` for read-only
        remote_operation : bool, optional, kwarg only
            True if remote operations call this method. Changes the symlink
            directories used while writing, by default False
        """
        self.mode = mode
        if self.mode == 'a':
            process_dir = self.REMOTEDIR if remote_operation else self.STAGEDIR
            if not os.path.isdir(process_dir):
                os.makedirs(process_dir)

            process_uids = [
                psplitext(x)[0] for x in os.listdir(process_dir)
                if x.endswith('.npy')
            ]
            for uid in process_uids:
                file_pth = pjoin(process_dir, f'{uid}.npy')
                self.rFp[uid] = partial(open_memmap, file_pth, 'r')

        if not remote_operation:
            if not os.path.isdir(self.STOREDIR):
                return
            store_uids = [
                psplitext(x)[0] for x in os.listdir(self.STOREDIR)
                if x.endswith('.npy')
            ]
            for uid in store_uids:
                file_pth = pjoin(self.STOREDIR, f'{uid}.npy')
                self.rFp[uid] = partial(open_memmap, file_pth, 'r')
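
The rFp mapping built above stores zero-argument callables instead of open handles, so a file is only memory-mapped the first time it is actually read. A minimal sketch of that lazy-open pattern, with hypothetical uid and directory names (open_memmap lives in numpy.lib.format):

from functools import partial
from os.path import join as pjoin
from numpy.lib.format import open_memmap

rFp = {}
uid, process_dir = 'a1b2c3', '/tmp/stage'        # hypothetical values
file_pth = pjoin(process_dir, f'{uid}.npy')
rFp[uid] = partial(open_memmap, file_pth, 'r')   # nothing is opened yet
# arr = rFp[uid]()   # the memmap is created only when the handle is called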
Example No. 2
    def open(self, mode: str, *, remote_operation: bool = False):
        """Open an hdf5 file handle in the Handler Singleton

        Parameters
        ----------
        mode : str
            one of `r` or `a` for read only / read-write.
        remote_operation : optional, kwarg only, bool
            if this hdf5 data is being created from a remote fetch operation, then
            we don't open any files for reading, and only open files for writing
            which exist in the remote data dir. (default is False, which means that
            write operations use the stage data dir and read operations use the
            data store dir)
        """
        self.mode = mode
        if self.mode == 'a':
            process_dir = self.REMOTEDIR if remote_operation else self.STAGEDIR
            if not os.path.isdir(process_dir):
                os.makedirs(process_dir)

            process_uids = [psplitext(x)[0] for x in os.listdir(process_dir) if x.endswith('.hdf5')]
            for uid in process_uids:
                file_pth = pjoin(process_dir, f'{uid}.hdf5')
                self.rFp[uid] = partial(h5py.File, file_pth, 'r', swmr=True, libver='latest')

        if not remote_operation:
            if not os.path.isdir(self.STOREDIR):
                return
            store_uids = [psplitext(x)[0] for x in os.listdir(self.STOREDIR) if x.endswith('.hdf5')]
            for uid in store_uids:
                file_pth = pjoin(self.STOREDIR, f'{uid}.hdf5')
                self.rFp[uid] = partial(h5py.File, file_pth, 'r', swmr=True, libver='latest')
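
The h5py version follows the same deferred-open pattern but asks for SWMR (single-writer / multiple-reader) access, which is why libver='latest' is also passed. A small sketch, assuming h5py is installed and 'data.hdf5' is an existing file:

from functools import partial
import h5py

# Deferred read-only handle; SWMR lets readers coexist with a single writer.
reader = partial(h5py.File, 'data.hdf5', 'r', swmr=True, libver='latest')
# f = reader()            # opened only when the data is first needed
# dset = f['some_key']    # hypothetical dataset name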
Example No. 3
    def delete_in_process_data(repo_path, *, remote_operation=False):
        """Removes some set of files entirely from the stage/remote directory.

        DANGER ZONE. This should essentially only be used to perform hard resets
        of the repository state.

        Parameters
        ----------
        repo_path : str
            path to the repository on disk
        remote_operation : optional, kwarg only, bool
            If true, modify contents of the remote_dir, if false (default) modify
            contents of the staging directory.
        """
        data_dir = pjoin(repo_path, c.DIR_DATA, _FmtCode)
        PDIR = c.DIR_DATA_STAGE if not remote_operation else c.DIR_DATA_REMOTE
        process_dir = pjoin(repo_path, PDIR, _FmtCode)
        if not os.path.isdir(process_dir):
            return

        process_uids = (psplitext(x)[0] for x in os.listdir(process_dir)
                        if x.endswith('.npy'))
        for process_uid in process_uids:
            remove_link_pth = pjoin(process_dir, f'{process_uid}.npy')
            remove_data_pth = pjoin(data_dir, f'{process_uid}.npy')
            os.remove(remove_link_pth)
            os.remove(remove_data_pth)
        os.rmdir(process_dir)
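
All three examples above recover the uid by stripping the format-specific extension from a directory listing. A tiny self-contained illustration of that step (file names are made up):

from os.path import splitext as psplitext

names = ['a1b2.npy', 'c3d4.npy', 'notes.txt']
uids = [psplitext(x)[0] for x in names if x.endswith('.npy')]
print(uids)   # ['a1b2', 'c3d4']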
Example No. 4
''')
        sys.exit()

    if args.type == 'csv':
        import pandas as pd

    from os.path import join as pjoin, exists as pexists, split as psplit, splitext as psplitext
    if not pexists(args.output):
        os.mkdir(args.output)

    if args.input.endswith('.txt'):
        out = parse_file(args.input)
    elif pexists(args.input) and os.path.isdir(args.input):
        for input_filepath in glob(pjoin(args.input, '*.txt')):
            out = parse_file(input_filepath)
            filename = psplitext(psplit(input_filepath)[-1])[0]
            
            if args.type == 'csv':
                output_filepath = pjoin(args.output, filename + '.csv')
                concat = []
                for entry in out:
                    df = pd.DataFrame(entry['data'], columns=['date', 'magnitude', 'error_margin', 'airmass'])
                    concat.append(df)
                
                df_out = pd.concat(concat)
                df_out.to_csv(output_filepath, index=False)
                print('wrote: %s' % output_filepath)
            elif args.type == 'json':
                output_filepath = pjoin(args.output, filename + '.json')
                with open(output_filepath, 'w') as ofile:
                    ofile.write(json.dumps(out))
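
The csv branch stacks one DataFrame per parsed entry before writing, while the json branch serialises the parsed structure directly. A hedged sketch of both writers using a made-up parse_file result (column names taken from the example above):

import json
import pandas as pd

# Hypothetical parse_file output: a list of entries, each carrying rows of data.
out = [{'data': [[2458849.5, 14.2, 0.03, 1.1]]},
       {'data': [[2458850.5, 14.1, 0.02, 1.2]]}]

frames = [pd.DataFrame(e['data'],
                       columns=['date', 'magnitude', 'error_margin', 'airmass'])
          for e in out]
pd.concat(frames).to_csv('example.csv', index=False)   # csv branch

with open('example.json', 'w') as ofile:               # json branch
    ofile.write(json.dumps(out))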
Example No. 5
        #  for layer_depth in [1100.0]:
        #      angles = np.linspace(0, 2 * np.pi, 1000)
        #      x_circle = (RADIUS_MARS - layer_depth) * np.sin(angles)
        #      y_circle = (RADIUS_MARS - layer_depth) * np.cos(angles)
        #      ax.plot(x_circle, y_circle, c="k", ls="dotted", lw=0.3)

        ax.set_xlim(-100, RADIUS_MARS + 100)
        ax.set_ylim(1000, RADIUS_MARS + 100)
        ax.set_xlabel("radius / km")
        ax.set_ylabel("radius / km")
        ax.set_title("Ray path for model %s" % fnam_nd)
        ax.set_aspect("equal", "box")
        ax.legend(loc="lower left")
        plt.show()


if __name__ == "__main__":
    args = define_arguments()
    if args.fnam_in[-5:] == ".deck":
        fnam_nd = psplitext(psplit(args.fnam_in)[-1])[0] + ".nd"
        deck2nd(args.fnam_in, fnam_nd=fnam_nd)
    else:
        fnam_nd = psplitext(psplit(args.fnam_in)[-1])[0] + "_ic.nd"
        had_ic = check_nd_for_innercore(fnam_nd_in=args.fnam_in,
                                        fnam_nd_out=fnam_nd)
        if had_ic:
            fnam_nd = args.fnam_in

    main(fnam_nd=fnam_nd, times=args.times, plot_rays=args.plot_rays)
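
The driver derives the .nd model name from the input path: .deck files are converted, anything else is checked for an inner core and may be rewritten with an '_ic' suffix. The name handling on its own, with a hypothetical input path (deck2nd and check_nd_for_innercore belong to the surrounding script and are not shown):

from os.path import split as psplit, splitext as psplitext

fnam_in = '/models/mars.deck'                # hypothetical input
base = psplitext(psplit(fnam_in)[-1])[0]     # 'mars'
suffix = '.nd' if fnam_in.endswith('.deck') else '_ic.nd'
print(base + suffix)                         # mars.nd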
Example No. 6
    def dumpGustsFromTracks(self, trackiter, windfieldPath, fnFormat,
                            progressCallback=None, timeStepCallback=None):
        """
        Dump the maximum wind speeds (gusts) observed over a region to
        netcdf files. One file is created for every track file.

        :type  trackiter: list of :class:`Track` objects
        :param trackiter: a list of :class:`Track` objects.

        :type  windfieldPath: str
        :param windfieldPath: the path where to store the gust output files.

        :type  fnFormat: str
        :param fnFormat: the format string for the output file names. The
                         default is set to 'gust-%02i-%04i.nc'.

        :type  progressCallback: function
        :param progressCallback: optional function to be called after a file is
                                 saved. This can be used to track progress.

        :type  timeStepCallback: function
        :param timeStepCallback: optional function to be called at each
                                 timestep to extract point values for
                                 specified locations.
        """
        if timeStepCallback:
            results = itertools.imap(self.calculateExtremesFromTrack, trackiter,
                                     itertools.repeat(timeStepCallback))
        else:
            results = itertools.imap(self.calculateExtremesFromTrack, trackiter)

        gusts = {}
        done = defaultdict(list)

        i = 0
        for track, result in results:
            gust, bearing, Vx, Vy, P, lon, lat = result

            if track.trackfile in gusts:
                gust1, bearing1, Vx1, Vy1, P1, lon1, lat1 = \
                    gusts[track.trackfile]
                gust = np.where(gust > gust1, gust, gust1)
                Vx = np.where(gust > gust1, Vx, Vx1)
                Vy = np.where(gust > gust1, Vy, Vy1)
                P = np.where(P1 < P, P1, P)

            gusts[track.trackfile] = (gust, bearing, Vx, Vy, P, lon, lat)
            done[track.trackfile] += [track.trackId]
            if len(done[track.trackfile]) >= done[track.trackfile][0][1]:
                path, basename = psplit(track.trackfile)
                base, ext = psplitext(basename)
                dumpfile = pjoin(windfieldPath,
                                 base.replace('tracks', 'gust') + '.nc')

                #dumpfile = pjoin(windfieldPath, fnFormat % (pp.rank(), i))
                self._saveGustToFile(track.trackfile,
                                     (lat, lon, gust, Vx, Vy, P),
                                     dumpfile)

                del done[track.trackfile]
                del gusts[track.trackfile]

                i += 1

                if progressCallback:
                    progressCallback(i)
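
The merge step above keeps, per track file, an elementwise running maximum of the gust field together with the wind components that produced it, and a running minimum of pressure. A NumPy-only sketch of that logic with illustrative values:

import numpy as np

gust1, gust = np.array([30., 45.]), np.array([40., 20.])
Vx1, Vx = np.array([3., 4.]), np.array([5., 1.])
P1, P = np.array([990., 980.]), np.array([985., 995.])

gust_max = np.where(gust > gust1, gust, gust1)   # [40., 45.]
Vx_max = np.where(gust > gust1, Vx, Vx1)         # component from the stronger gust
P_min = np.where(P1 < P, P1, P)                  # [985., 980.]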
Example No. 7
# pip install pyfits

import sys
import bz2
import pyfits
from os.path import splitext as psplitext

if __name__ == '__main__':
    input_filepath = sys.argv[-1] # e.g. 'frame-g-004570-4-0135.fits.bz2'
    output_filepath = psplitext(input_filepath)[0]

    bz2_file = bz2.BZ2File(input_filepath, 'rb')
    try:
        data = bz2_file.read()
        with open(output_filepath, 'wb') as ofile:
            ofile.write(data)
    finally:
        bz2_file.close()

    hdulist = pyfits.open(output_filepath)
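
The same decompress-then-open flow can be written with context managers so the bz2 handle is closed even if the write fails; a minimal sketch with a hypothetical input name (pyfits is kept as in the original, though astropy.io.fits is its maintained successor):

import bz2
from os.path import splitext as psplitext

input_filepath = 'frame.fits.bz2'                # hypothetical input
output_filepath = psplitext(input_filepath)[0]   # 'frame.fits'

with bz2.BZ2File(input_filepath, 'rb') as bz2_file, \
        open(output_filepath, 'wb') as ofile:
    ofile.write(bz2_file.read())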

Example No. 8
    def fn_noext(self):
        """Filename without extension."""
        return psplitext(self.filename)[0]
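
psplitext removes only the final extension, so fn_noext leaves any earlier suffixes in place. A quick illustration:

from os.path import splitext as psplitext

print(psplitext('image.fits')[0])        # image
print(psplitext('image.fits.bz2')[0])    # image.fits  (only the last suffix is removed)
print(psplitext('archive.tar.gz')[1])    # .gz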