Example #1
def ima_coverage(recompute=False):
    """Returns a dictionary, key=orbit number, value = size in MB for the IMA data from that orbit"""

    results = {}
    try:
        with open(mex.data_directory + "nilsson_ima_coverage.pck", "rb") as f:
            results = pickle.load(f)
    except Exception as e:
        recompute = True
        print(e)

    if recompute:
        now = celsius.now()
        directory = mex.data_directory + "aspera/nilsson/Mars_mat_files4/"
        results = {}
        for o in mex.orbits:
            t = mex.orbits[o].start
            if t > now:
                break
            t1 = mex.orbits[o].finish
            size = 0
            while t < t1:
                dt = celsius.spiceet_to_datetime(t)
                fname = directory + "fion" + "%4d%02d%02d%02d00.mat" % (dt.year, dt.month, dt.day, dt.hour)

                if os.path.exists(fname):
                    size += os.path.getsize(fname)
                t += 3600.0
            results[o] = size / (1024 * 1024)  # MB
        with open(mex.data_directory + "nilsson_ima_coverage.pck", "wb") as f:
            pickle.dump(results, f)
    return results
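
A minimal usage sketch for ima_coverage, assuming the same module context as the snippet above (mex configured, celsius importable, and the pickled cache either present or recomputable); the zero-MB threshold is purely illustrative:

# Hypothetical usage: list the orbits for which IMA data actually exists on disk.
coverage = ima_coverage()
covered = {o: mb for o, mb in coverage.items() if mb > 0.0}
print("Orbits with IMA data: %d (total %.1f MB)" % (len(covered), sum(covered.values())))
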
Example #2
def plot_aspera_els(start, finish=None, verbose=False, ax=None, colorbar=True,
                        vmin=None, vmax=None, cmap=None, safe=True):
    """docstring for plot_aspera_els"""
    if cmap is None:
        cmap = plt.cm.Spectral_r

    if ax is None:
        ax = plt.gca()
    plt.sca(ax)

    if finish is None:
        finish = start + 86400.

    if vmin is None:
        vmin = 5.

    if vmax is None:
        vmax = 9.

    no_days = (finish - start) / 86400.

    if verbose: print('Plotting ASPERA/ELS between %s and %s...' % (celsius.utcstr(start, 'ISOC'), celsius.utcstr(finish, 'ISOC')))

    directory = mex.data_directory + 'aspera/els/'

    all_files_to_read = []

    for et in np.arange(start - 10., finish + 10., 86400.):
        dt = celsius.spiceet_to_datetime(et)
        f_name = directory + 'MEX_ELS_EFLUX_%4d%02d%02d_*.cef' % (dt.year, dt.month, dt.day)
        all_day_files = glob.glob(f_name)
        if not all_day_files:
            if verbose: print("No files matched %s" % f_name)
        else:
            all_files_to_read.extend(all_day_files)

    success = False
    all_extents = []
    for f_name in all_files_to_read:
        try:
            # Find energy bins:
            with open(f_name, 'r') as f:
                line_no = 0
                while line_no < 43:
                    line_no += 1
                    line = f.readline()
                    if 'DATA = ' in line:
                        energy_bins = np.fromstring(line[7:], sep=',')
                        energy_bins.sort()
                        break
                else:
                    raise IOError("No ENERGY_BINS info found in header")

            data = np.loadtxt(f_name, skiprows = 43, converters={1:lambda x: celsius.utcstr_to_spiceet(x[:-1])})

            if data.shape[1] != (energy_bins.shape[0] + 2):
                raise ValueError("Size of ENERGY_BINS and DATA doesn't match")

            # Check timing:
            dt = np.diff(data[:,1])
            spacing = np.median(dt)
            # checks = abs(dt - spacing) > (spacing/100.)
            # if np.any(checks):
            #     # raise ValueError("Spacing is not constant: %d differ by more than 1%% of %f:" % (np.sum(checks), spacing))
            #     print "Spacing is not constant: %d differ by more than 1%% of %f (Maximum = %f):" % (np.sum(checks), spacing, max(abs(dt - spacing)))
            #
            # if safe and (max(abs(dt - spacing)) > 10.):
            #     print '-- To big spacing - dumping'
            #     continue

            # Interpolate to constant spacing:
            n_records = int((data[-1,1] - data[0,1]) / spacing)
            new_data = np.empty((n_records, data.shape[1])) + np.nan
            new_data[:,1] = np.linspace(data[0,1], data[-1,1], n_records)
            for i in range(3, data.shape[1]):
                new_data[:,i] = np.interp(new_data[:,1],data[:,1], data[:,i], left=np.nan, right=np.nan)

            data = new_data

            extent = (data[0,1], data[-1,1], energy_bins[0], energy_bins[-1])

            if (extent[0] > finish) or (extent[1] < start):
                if verbose:
                    print("This block not within plot range - dumping")
                continue

            all_extents.append(extent)
            if verbose:
                print('Plotting ASPERA ELS block, Time: %s - %s, Energy: %f - %f' % (
                                celsius.utcstr(extent[0],'ISOC'), celsius.utcstr(extent[1],'ISOC'),
                                extent[2], extent[3]))
                print('Shape = ', data.shape)

            plt.imshow(np.log10(data[:,3:].T), interpolation="nearest", aspect='auto', extent=extent, vmin=vmin, vmax=vmax, cmap=cmap)
            success = True
        except IOError as e:
            if verbose:
                print('Error reading %s' % f_name)
                print('--', e)
            continue

    if success and colorbar:
        plt.xlim(start, finish)
        plt.ylim(max([e[2] for e in all_extents]), min([e[3] for e in all_extents]))
        celsius.ylabel('E / eV')
        plt.yscale('log')
        cmap.set_under('w')
        old_ax = plt.gca()
        plt.colorbar(cax=celsius.make_colorbar_cax(), cmap=cmap, ticks=[5,6,7,8,9])
        plt.ylabel(r'log$_{10}$ D.E.F.')
        plt.sca(old_ax)
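
A minimal usage sketch for plot_aspera_els, assuming start and finish are SPICE ephemeris times in seconds and that celsius.utcstr_to_spiceet accepts an ISO-style time string (the exact string format is an assumption):

# Hypothetical usage: plot one day of ELS data as a spectrogram.
start = celsius.utcstr_to_spiceet("2005-01-01T00:00:00")  # assumed input format
plot_aspera_els(start, start + 86400., verbose=True)
plt.show()
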
Example #3
def read_ima(start, finish=None, dataset="FION", verbose=False, aux=False):
    """Read Nilsson's ima files into a list.
    dataset="fion": best for heavy-ions
    dataset="ion": best for protons, but not much in it.
    dataset="aux": ancillary info
note: setting aux=True also reads the aux files, and appends each files contents into the
principal blocks being read by extending each dictionary."""

    if aux:
        if dataset == "aux":
            raise ValueError("""Don't append aux data to the aux data. The whole universe will explode""")

    if finish is None:
        finish = start + 1.0

    directory = mex.data_directory + "aspera/nilsson/Mars_mat_files4/"

    remote_path = "[email protected]:/irf/data/mars/aspera3/Mars_mat_files4/"

    et = start - 3600.0  # one hour before

    out = []
    error_files = []
    remote_failed_count = 0

    while True:
        if et > (finish + 3600.0):
            break

        dt = celsius.spiceet_to_datetime(et)
        fname = dataset.lower() + "%4d%02d%02d%02d00.mat" % (dt.year, dt.month, dt.day, dt.hour)

        if not os.path.exists(directory + fname):
            remote_fname = remote_path + fname

            fd, temp_f_name = tempfile.mkstemp(suffix="tmp.mat")
            os.close(fd)  # scp writes to the path directly; the open descriptor isn't needed
            command = ("scp", remote_fname, temp_f_name)

            if verbose:
                print("Fetching %s" % remote_fname)
            try:
                # os.spawnvp(os.P_WAIT, command[0], command)
                subprocess.check_call(command)

                # reset to zero on success
                remote_failed_count = 0

            except subprocess.CalledProcessError as e:
                remote_failed_count += 1
                # raise IOError("Retrieval from aurora failed: %s" % str(e))
                print("Retrieval of %s from aurora failed" % remote_fname)
                if remote_failed_count > MAX_FAILED_REMOTE_FILES:
                    print("Maximum failed remote tries reached")
                    break
            else:
                d = os.path.dirname(directory + fname)
                if d and not os.path.exists(d):
                    if verbose:
                        print("Creating %s" % d)
                    os.makedirs(d)

                command = ("mv", temp_f_name, directory + fname)

                try:
                    # os.spawnvp(os.P_WAIT, command[0], command)
                    subprocess.check_call(command)
                except subprocess.CalledProcessError as e:
                    print(e)
                    raise IOError("Error moving file to %s" % (directory + fname))

        try:
            out.append(loadmat(directory + fname))

            # Squeeze arrays
            for o in out[-1]:
                if isinstance(out[-1][o], np.ndarray):
                    out[-1][o] = np.squeeze(out[-1][o])

            if verbose:
                print(
                    "Read: %s: %s - %s"
                    % (
                        fname,
                        celsius.time_convert(out[-1]["tmptime"][0], "UTCSTR", "MATLABTIME"),
                        celsius.time_convert(out[-1]["tmptime"][-1], "UTCSTR", "MATLABTIME"),
                    )
                )
        except IOError as e:
            error_files.append(fname)
            if verbose:
                print("Missing: %s" % fname)

        et += 60.0 * 60.0

    if verbose:
        print("Read %d files - %d errors" % (len(out), len(error_files)))

    if aux:
        aux_data_blocks = read_ima(start, finish, dataset="aux", verbose=verbose)

        if len(aux_data_blocks) != len(out):
            raise IOError("Number of aux data files doesn't match the actual data files")

        for block, aux_block in zip(out, aux_data_blocks):
            out["aux"] = aux_block

    return out
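
A minimal usage sketch for read_ima, assuming start is a SPICE ephemeris time as above; the three-hour interval and the printed keys are illustrative only:

# Hypothetical usage: read three hours of heavy-ion data together with the aux files.
blocks = read_ima(start, start + 3 * 3600., dataset="fion", aux=True, verbose=True)
for block in blocks:
    print(block["tmptime"][0], block["tmptime"][-1])  # time range covered by each block
    print(sorted(block["aux"].keys()))                # ancillary data attached to each block
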
Example #4
def read_els(start, finish=None, verbose=False):
    """
    Read ASPERA/ELS data into blocks
    """

    if finish is None:
        finish = start + 1.0

    directory = mex.data_directory + "aspera/nilsson/els/mat/"

    et = start - 3600.0  # one hour before

    out = []
    error_files = []

    remote_failed_count = 0
    # allow_remote = True

    while True:
        if et > (finish + 3600.0):
            break

        dt = celsius.spiceet_to_datetime(et)
        fname = "%4d/elec%4d%02d%02d%02d.mat" % (dt.year, dt.year, dt.month, dt.day, dt.hour)
        remote_path = "[email protected]:/irf/data/mars/aspera3/mars/elsdata/"

        if not os.path.exists(directory + fname):
            remote_fname = remote_path + "%04d%02d/%4d%02d%02d%02d0000.mat"
            remote_fname = remote_fname % (dt.year, dt.month, dt.year, dt.month, dt.day, dt.hour)

            fd, temp_f_name = tempfile.mkstemp(suffix="tmp.mat")
            os.close(fd)  # scp writes to the path directly; the open descriptor isn't needed
            command = ("scp", remote_fname, temp_f_name)

            if verbose:
                print("Fetching %s" % remote_fname)
            try:
                # os.spawnvp(os.P_WAIT, command[0], command)
                subprocess.check_call(command)
                # reset fail count on success
                remote_failed_count = 0
            except subprocess.CalledProcessError as e:
                remote_failed_count += 1
                # raise IOError("Retrieval from aurora failed: %s" % str(e))
                print("Retrieval of %s from aurora failed" % remote_fname)
                if remote_failed_count > MAX_FAILED_REMOTE_FILES:
                    print("Maximum failed remote tries reached")
                    break
            else:
                d = os.path.dirname(directory + fname)
                if d and not os.path.exists(d):
                    if verbose:
                        print("Creating %s" % d)
                    os.makedirs(d)

                command = ("mv", temp_f_name, directory + fname)

                try:
                    # os.spawnvp(os.P_WAIT, command[0], command)
                    subprocess.check_call(command)
                except subprocess.CalledProcessError as e:
                    print(e)
                    raise IOError("Error moving file to %s" % (directory + fname))

        try:
            b = loadmat(directory + fname)
        except IOError as e:
            error_files.append(fname)
            if verbose:
                print("Missing: %s" % fname)
            et += 60.0 * 60.0
            continue

        # Squeeze arrays
        for o in b:
            if isinstance(b[o], np.ndarray):
                b[o] = np.squeeze(b[o])

        if verbose:
            print(
                "Read:    %s: %s - %s"
                % (
                    fname,
                    celsius.time_convert(b["elstimes"][0, 0], "UTCSTR", "MATLABTIME"),
                    celsius.time_convert(b["elstimes"][0, -1], "UTCSTR", "MATLABTIME"),
                )
            )

        #         if b['EElec'].size < 2:
        #             print("""----------------------
        # read_els: %s contains data that is effectively 1-dimensional (i.e. not an image)
        # shape is %s. Ignoring this!
        # ---------------------""" % (fname, str(b['fElec'].shape)))
        #             et += 60. * 60.
        #             continue

        et += 60.0 * 60.0
        out.append(b)

    return out
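
A minimal usage sketch for read_els, again assuming start is a SPICE ephemeris time; the two-hour interval is arbitrary:

# Hypothetical usage: read the ELS blocks covering a two-hour interval.
blocks = read_els(start, start + 2 * 3600., verbose=True)
for b in blocks:
    print(b["elstimes"][0, 0], b["elstimes"][0, -1])  # start/end time of each block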