Example #1
def get_obs_metadata(obs):
    """Query the MWA metadata service for an obs ID and return common pointing information."""
    beam_meta_data = getmeta(service='obs', params={'obs_id': obs})
    channels = beam_meta_data[u'rfstreams'][u"0"][u'frequencies']
    # Convert coarse channel numbers to centre frequencies in MHz (1.28 MHz per channel)
    freqs = [float(c) * 1.28 for c in channels]
    xdelays = beam_meta_data[u'rfstreams'][u"0"][u'xdelays']
    ydelays = beam_meta_data[u'rfstreams'][u"0"][u'ydelays']
    # Altitude is unused here; keep the pointing azimuth and zenith angle
    _, pointing_AZ, pointing_ZA = mwa_alt_az_za(obs)

    return {
        "channels": channels,
        "frequencies": freqs,
        "xdelays": xdelays,
        "ydelays": ydelays,
        "az": pointing_AZ,
        "za": pointing_ZA
    }
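A minimal usage sketch (the obs ID is hypothetical; assumes getmeta and mwa_alt_az_za from vcstools are in scope):

meta = get_obs_metadata(1221399680)   # hypothetical obs ID
print(meta["frequencies"])            # coarse channel centres in MHz (channel * 1.28)
print(meta["az"], meta["za"])         # pointing azimuth and zenith angle in degrees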
obsid = args.observation

# Check if already spliced: spliced output is named "<obsid>*.fits", while
# unspliced per-channel files carry a "*_<obsid>*" prefix
if glob.glob('{0}/{1}*fits'.format(args.work_dir, args.observation)) and \
   not glob.glob('{0}/*_{1}*fits'.format(args.work_dir, args.observation)):
    print('All files are already spliced so exiting')
    exit()

# Get frequency channels
if args.channels:
    channels = args.channels
    channels.sort()
else:
    print("Obtaining metadata from http://mwa-metadata01.pawsey.org.au/metadata/ for OBS ID: " + str(obsid))
    beam_meta_data = meta.getmeta(service='obs', params={'obs_id':obsid})
    channels = beam_meta_data[u'rfstreams'][u"0"][u'frequencies']
print("Chan order: {}".format(channels))


# Move into working dir
old_dir = os.getcwd()
if args.work_dir:
    os.chdir(args.work_dir)


# Get the number of fits files for each channel
n_fits_list = []
fits_len = []
for ch in channels:
    if args.incoh:
Example #3
    def construct_base_string(self):
        """Construct the basic string to be written to the RTS config file.
        This string will then be edit with regexp to update the relevant details.

        Returns
        -------
        file_str  : `str`
            The base string to be written to an RTS configuration srcipt after manipulation.

        Raises
        ------
        CalibrationError
            When there is a problem with some of the observation information and/or its manipulation.
        """
        # get calibrator observation information from database
        logger.info("Querying metadata database for obsevation information...")
        obsinfo = getmeta(service='obs', params={'obs_id': str(self.cal_obsid)})

        # quick check to make sure what's returned is actually real data
        if obsinfo[u'metadata'] is None:
            errmsg = "Metadata database error (metadata empty). Maybe an invalid obs ID?"
            logger.error(errmsg)
            raise CalibrationError(errmsg)

        # get the RA/Dec pointing for the primary beam
        ra_pointing_degs = obsinfo['metadata']['ra_pointing']
        dec_pointing_degs = obsinfo['metadata']['dec_pointing']

        # now get the absolute channels
        self.channels = obsinfo[u'rfstreams'][u"0"][u'frequencies']
        # and figure out the MaxFrequency parameter
        self.max_frequency = 1.28 * (max(self.channels) + 1) # this way we ensure that MaxFrequency is applicable for ALL subbands
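        # e.g. if the highest coarse channel is 132, MaxFrequency = 1.28 * 133 = 170.24 MHz,
        # comfortably above that channel's top edge (1.28 * 132 + 0.64 = 169.60 MHz)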

        # use the same operations as in timeconvert.py for our specific need
        logger.info("Converting times with astropy")
        #mwa_loc = EarthLocation.of_site('Murchison Widefield Array')
        mwa_loc = EarthLocation.from_geodetic(lon="116:40:14.93", lat="-26:42:11.95", height=377.8)
        # Astropy formatting
        utctime = strptime(self.utctime, '%Y%m%d%H%M%S')
        a_time = strftime('%Y-%m-%dT%H:%M:%S', utctime)
        obstime = Time(a_time, format='fits', scale='utc', location=mwa_loc)
        lst_in_hours = obstime.sidereal_time('apparent').hourangle
        jd = obstime.jd
        logger.info("   LST: {0}".format(lst_in_hours))
        logger.info("   JD : {0}".format(jd))

        # set the HA of the image centre to the primary beam HA
        logger.debug("Determining HA and DEC for primary beam")
        self.JD = jd
        pb_ha_hours = (ra_pointing_degs / 360.0) * 24.0
        self.PB_HA = lst_in_hours - pb_ha_hours
        self.PB_DEC = dec_pointing_degs

        logger.debug("Primary beam: HA = {0} hrs, Dec = {1} deg".format(self.PB_HA, self.PB_DEC))
        logger.debug("JD = {0}".format(self.JD))

        # get the lowest frequency channel
        freqs = obsinfo['rfstreams']['0']['frequencies']
        start_channel = freqs[0]

        self.freq_base = start_channel * 1.28e6 - 0.64e6 + 15e3  # frequency base in Hz (based on Steve Ord's logic)
        self.freq_base = self.freq_base / 1.0e6  # convert to MHz

        logger.debug("Frequency lower edge = {0} MHz".format(self.freq_base))

        # make metafits file formatted for RTS
        self.metafits_RTSform = self.metafits.split("_metafits_ppds.fits")[0]
        logger.debug("RTS form metafits location: {0}".format(self.metafits_RTSform))

        # create the final file string, expanding symlinks to real paths
        logger.info("Constructing base RTS configuration script content")
        file_str = """
ReadAllFromSingleFile=
BaseFilename={base}/*_gpubox
ReadGpuboxDirect={read_direct}
UseCorrelatorInput={use_corr_input}

ReadMetafitsFile=1
MetafitsFilename={metafits_file}

DoCalibration=
doMWArxCorrections=1
doRawDataCorrections=1
doRFIflagging=0
useFastPrimaryBeamModels={beam_model_bool}
generateDIjones=1
applyDIcalibration=1
UsePacketInput=0
UseThreadedVI=1

MaxFrequency={max_freq}
ObservationFrequencyBase={base_freq}
ObservationTimeBase={base_time}
ObservationPointCentreHA={obs_ha}
ObservationPointCentreDec={obs_dec}
ChannelBandwidth={fcbw}
NumberOfChannels={nchan}

CorrDumpsPerCadence={corr_dumps_per_cadence}
CorrDumpTime={corr_dump_time}
NumberOfIntegrationBins={n_int_bins}
NumberOfIterations=1

StartProcessingAt=0

ArrayPositionLat={array_lat}
ArrayPositionLong={array_lon}
ArrayNumberOfStations=128

ArrayFile=

SourceCatalogueFile={source_list}
NumberOfCalibrators=1
NumberOfSourcesToPeel=0
calBaselineMin=20.0
calShortBaselineTaper=40.0
FieldOfViewDegrees=1""".format(base=self.data_dir,
                               read_direct=self.readDirect,
                               use_corr_input=self.useCorrInput,
                               metafits_file=self.metafits_RTSform,
                               beam_model_bool=self.beam_model_bool,
                               max_freq=self.max_frequency,
                               base_freq=self.freq_base,
                               base_time=self.JD,
                               obs_ha=self.PB_HA,
                               obs_dec=self.PB_DEC,
                               fcbw=self.fine_cbw,
                               nchan=self.nfine_chan,
                               corr_dumps_per_cadence=self.n_dumps_to_average,
                               corr_dump_time=self.corr_dump_time,
                               n_int_bins=self.n_integration_bins,
                               array_lat=self.ArrayPositionLat,
                               array_lon=self.ArrayPositionLong,
                               source_list=self.source_list)

        return file_str
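The docstring above notes that the base string is later edited with regexps; a minimal sketch of that pattern (the `rts` instance and the new value are illustrative, while the ObservationTimeBase key appears in the template above):

import re

file_str = rts.construct_base_string()  # hypothetical instance of the class above
# Swap in an updated value for a single key without rebuilding the template
file_str = re.sub(r"ObservationTimeBase=.*", "ObservationTimeBase=2458855.5", file_str)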
Example #4
def vcs_download(obsid, start_time, stop_time, increment, data_dir,
                 product_dir, parallel,
                 ics=False, n_untar=2, keep="", vcstools_version="master",
                 nice=0):

    # Load computer-dependent config file
    comp_config = load_config_file()

    logger.info("Downloading files from archive")
    voltdownload = "voltdownload.py"
    obsinfo = meta.getmeta(service='obs', params={'obs_id':str(obsid)})
    comb_del_check = meta.combined_deleted_check(obsid, begin=start_time, end=stop_time)
    data_format = obsinfo['dataquality']
    if data_format == 1 or (comb_del_check and data_format == 6):
        # either only the raw data is available (data_format == 1)
        # or there were combined files but they were deleted (comb_del_check and data_format == 6)
        target_dir = link = '/raw'
        if ics:
            logger.error("Data have not been recombined in the "
                         "archive yet. Exiting")
            sys.exit(0)
        data_type = 11
        dl_dir = "{0}/{1}".format(data_dir, target_dir)
        dir_description = "Raw"
    elif data_format == 6:
        target_dir = link = '/combined'
        if ics:
            data_type = 15
        else:
            data_type = 16
        dl_dir = "{0}/{1}".format(data_dir, target_dir)
        dir_description = "Combined"
    else:
        logger.error("Unable to determine data format from archive. Exiting")
        sys.exit(0)
    mdir(dl_dir, dir_description, gid=comp_config['gid'])
    create_link(data_dir, target_dir, product_dir, link)
    batch_dir = product_dir+"/batch/"

    for time_to_get in range(start_time, stop_time, increment):
        if time_to_get + increment > stop_time:
            increment = stop_time - time_to_get + 1
        # voltdownload wants how many seconds PAST the first one, so the
        # duration passed to it below is increment - 1

        voltdownload_batch = "volt_{0}".format(time_to_get)
        check_batch = "check_volt_{0}".format(time_to_get)
        volt_secs_to_run = datetime.timedelta(seconds=500*increment)
        check_secs_to_run = "15:00"
        if data_type == 16:
            check_secs_to_run = "10:15:00"

        checks = "checks.py"
        # Write out the checks batch file but don't submit it
        commands = []
        commands.append("newcount=0")
        commands.append("let oldcount=$newcount-1")
        commands.append("sed -i -e \"s/oldcount=${{oldcount}}/oldcount=${{newcount}}/\" {0}".\
                        format(batch_dir+voltdownload_batch+".batch"))
        commands.append("oldcount=$newcount; let newcount=$newcount+1")
        commands.append("sed -i -e \"s/_${{oldcount}}.out/_${{newcount}}.out/\" {0}".\
                        format(batch_dir+voltdownload_batch+".batch"))
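        # The two sed edits above bump the retry counter baked into the download
        # batch script so that a resubmission writes to a fresh _<n>.out file
        # instead of clobbering the previous attempt's log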
        checks_command = "-m download -o {0} -w {1} -b {2} -i {3} --data_type {4}".format(obsid,
                            dl_dir, time_to_get, increment, data_type)
        commands.append('{0} {1}'.format(checks, checks_command))
        commands.append("if [ $? -eq 1 ];then")
        commands.append("sbatch {0}".format(batch_dir+voltdownload_batch+".batch"))
        # if we have tarballs we send the untar jobs to the workq
        if data_type == 16:
            commands.append("else")
            untar = 'untar.sh'
            untar_command = "-w {0} -o {1} -b {2} -e {3} -j {4} {5}".format(dl_dir,
                                obsid, time_to_get, time_to_get+increment-1, n_untar,
                                keep)
            commands.append('{0} {1}'.format(untar, untar_command))

            #commands.append("sbatch {0}.batch".format(batch_dir+tar_batch))
        commands.append("fi")

        # Download and checks should be done on Zeus's cpuq. This will only work
        # on Galaxy as the Ozstar workflow is different
        submit_slurm(check_batch, commands, batch_dir=batch_dir,
                        slurm_kwargs={"time": check_secs_to_run,
                                    "nice": nice},
                        vcstools_version=vcstools_version, submit=False,
                        outfile=batch_dir+check_batch+"_0.out",
                        queue="zcpuq", export="NONE", mem=10240,
                        # Manually handing it the module dir as it should only run on Galaxy
                        module_dir='/group/mwa/software/modulefiles')



        #module_list=["mwa-voltage/master"]
        #removed the master version load because by default we load the python 3 version
        module_list=[]
        body = []
        body.append("oldcount=0")
        body.append("let newcount=$oldcount+1")
        body.append("if [ ${newcount} -gt 10 ]; then")
        body.append("echo \"Tried ten times, this is silly. Aborting here.\";exit")
        body.append("fi")
        body.append("sed -i -e \"s/newcount=${{oldcount}}/newcount=${{newcount}}/\" {0}\n".\
                    format(batch_dir+check_batch+".batch"))
        body.append("sed -i -e \"s/_${{oldcount}}.out/_${{newcount}}.out/\" {0}".\
                    format(batch_dir+check_batch+".batch"))
        body.append("sbatch -d afterany:${{SLURM_JOB_ID}} {0}".\
                    format(batch_dir+check_batch+".batch"))
        voltdownload_command = "--obs={0} --type={1} --from={2} --duration={3} --parallel={4}"\
                                " --dir={5}".format(obsid, data_type, time_to_get, increment-1,
                                parallel, dl_dir)
        body.append("{0} {1}".format(voltdownload, voltdownload_command))
        submit_slurm(voltdownload_batch, body, batch_dir=batch_dir,
                        module_list=module_list,
                        slurm_kwargs={"time" : str(volt_secs_to_run),
                                      "nice" : nice},
                        vcstools_version=vcstools_version,
                        outfile=batch_dir+voltdownload_batch+"_1.out",
                        queue="copyq", export="NONE", mem=5120,
                        # Manually handing it the module dir as it should only run on Galaxy
                        module_dir='/group/mwa/software/modulefiles')
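A minimal call sketch for the downloader above (the obs ID, GPS times, and directories are all illustrative values):

# Download 600 s of data in 300 s chunks with 8 parallel streams (hypothetical values)
vcs_download(1221399680, 1221399680, 1221400280, 300,
             "/astro/mwavcs/vcs/1221399680", "/group/mwavcs/vcs/1221399680",
             8, ics=False, n_untar=2, vcstools_version="master")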
def plotSkyMap(obsfile,
               targetfile,
               oname,
               show_psrcat=False,
               show_mwa_sky=False,
               show_mwa_unique=False):

    fig = plt.figure()
    ax = fig.add_subplot(111, projection="mollweide")

    mwa_dec_lim = 30
    mwa_only_dec_lim = -50

    if show_mwa_sky:
        # Make a patch that is transformable through axis projection
        # and highlight the visible sky for the MWA
        Path = mpath.Path
        ra_start = -np.pi
        ra_end = np.pi
        dec_start = -np.pi / 2
        dec_end = np.radians(mwa_dec_lim)
        path_data = [
            (Path.MOVETO, (ra_start, dec_start)),
            (Path.LINETO, (ra_start, dec_end)),
            (Path.LINETO, (ra_end, dec_end)),
            (Path.LINETO, (ra_end, dec_start)),
            (Path.CLOSEPOLY, (ra_end, dec_start)),
        ]
        codes, verts = zip(*path_data)
        path = mpath.Path(verts, codes)
        patch = mpatches.PathPatch(path,
                                   lw=0,
                                   ec="lightskyblue",
                                   fc="lightskyblue",
                                   label="All MWA sky")
        ax.add_patch(patch)

    if show_mwa_unique:
        # Make a patch that is transformable through axis projection
        # and highlight the part of the sky ONLY visible to the MWA
        Path = mpath.Path  # re-alias here so this block also works when show_mwa_sky is False
        ra_start = -np.pi
        ra_end = np.pi
        dec_start = -np.pi / 2
        dec_end = np.radians(mwa_only_dec_lim)
        path_data = [
            (Path.MOVETO, (ra_start, dec_start)),
            (Path.LINETO, (ra_start, dec_end)),
            (Path.LINETO, (ra_end, dec_end)),
            (Path.LINETO, (ra_end, dec_start)),
            (Path.CLOSEPOLY, (ra_end, dec_start)),
        ]
        codes, verts = zip(*path_data)
        path = mpath.Path(verts, codes)
        patch = mpatches.PathPatch(path,
                                   lw=0,
                                   ec="lightgreen",
                                   fc="lightgreen",
                                   label="Unique MWA sky")
        ax.add_patch(patch)

    if show_psrcat:
        # Retrieve the local installed PSRCAT catalogue and plot those pulsar positions
        cmd = 'psrcat -o short_csv -nocand -nonumber -c "Jname RAJ DECJ" | sed "2d" > psrcat.csv'
        subprocess.call(cmd, shell=True)
        psrcat_coords = read_data("psrcat.csv", delim=";")
        os.remove("psrcat.csv")

        # Create masks for pulsars inside and outside the declination limit of the MWA
        maskGood = psrcat_coords.dec.wrap_at(180 * u.deg).deg < mwa_dec_lim
        maskBad = psrcat_coords.dec.wrap_at(180 * u.deg).deg >= mwa_dec_lim

        psrcat_ra_good = -psrcat_coords.ra.wrap_at(
            180 *
            u.deg).rad[maskGood]  # negative because RA increases to the West
        psrcat_dec_good = psrcat_coords.dec.wrap_at(180 * u.deg).rad[maskGood]

        psrcat_ra_bad = -psrcat_coords.ra.wrap_at(
            180 *
            u.deg).rad[maskBad]  # negative because RA increases to the West
        psrcat_dec_bad = psrcat_coords.dec.wrap_at(180 * u.deg).rad[maskBad]

        # Now plot the pulsar locations
        ax.scatter(psrcat_ra_good,
                   psrcat_dec_good,
                   0.01,
                   marker="x",
                   color="0.4",
                   zorder=1.4)
        ax.scatter(psrcat_ra_bad,
                   psrcat_dec_bad,
                   0.01,
                   marker="x",
                   color="0.8",
                   zorder=1.4)

    # Calculate beam patterns and plot contours
    levels = np.arange(0.25, 1., 0.05)
    cmap = plt.get_cmap("cubehelix_r")

    obsids = read_data(obsfile, coords=False)["OBSID"]
    for obsid in obsids:
        #print "Accessing database for observation: {0}".format(obsid)
        logger.info("Accessing database for observation: {0}".format(obsid))

        # TODO: I think this process is now implemented in a function in mwa_metadb_utils, need to double check
        beam_meta_data = getmeta(service='obs', params={'obs_id': obsid})

        ra = beam_meta_data[u'metadata'][u'ra_pointing']
        dec = beam_meta_data[u'metadata'][u'dec_pointing']
        duration = beam_meta_data[u'stoptime'] - beam_meta_data[
            u'starttime']  # duration in seconds (difference of GPS times)
        delays = beam_meta_data[u'rfstreams'][u'0'][u'xdelays']

        minfreq = float(min(
            beam_meta_data[u'rfstreams'][u"0"][u'frequencies']))
        maxfreq = float(max(
            beam_meta_data[u'rfstreams'][u"0"][u'frequencies']))
        centrefreq = 1.28e6 * (minfreq + (maxfreq - minfreq) / 2)  # in Hz
        channels = beam_meta_data[u'rfstreams'][u"0"][u'frequencies']
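        # Worked example: channels 109..132 give minfreq = 109, maxfreq = 132,
        # so centrefreq = 1.28e6 * (109 + 11.5) = 154.24e6 Hz (154.24 MHz)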

        beam_meta_data = get_common_obs_metadata(obsid)

        # Create a meshgrid over which to iterate
        Dec = []
        RA = []
        map_dec_range = np.arange(-89, 90, 3)
        map_ra_range = np.arange(0, 360, 3)
        for i in map_dec_range:
            for j in map_ra_range:
                Dec.append(i)
                RA.append(j)
        RADecIndex = np.arange(len(RA))
        names_ra_dec = np.column_stack((RADecIndex, RA, Dec))
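        # Equivalent vectorised construction of the grid above (sketch):
        #   ra_grid, dec_grid = np.meshgrid(map_ra_range, map_dec_range)
        #   RA, Dec = ra_grid.ravel().tolist(), dec_grid.ravel().tolist()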

        #print "Creating beam patterns..."
        time_intervals = 600  # seconds

        powout = get_beam_power_over_time(names_ra_dec,
                                          common_metadata=beam_meta_data,
                                          dt=time_intervals,
                                          degrees=True)
        z = []
        x = []
        y = []
        for c in range(len(RA)):
            temppower = 0.
            for t in range(powout.shape[1]):
                power_ra = powout[c, t, 0]
                if power_ra > temppower:
                    temppower = power_ra
            z.append(temppower)
            if RA[c] > 180:
                x.append(-RA[c] / 180. * np.pi + 2 * np.pi)
            else:
                x.append(-RA[c] / 180. * np.pi)

            y.append(Dec[c] / 180. * np.pi)

        nx = np.array(x)
        ny = np.array(y)
        nz = np.array(z)

        #print "Plotting beam pattern contours..."
        logger.info("Plotting beam pattern contours...")
        # Set vmin and vmax to ensure the beam patterns are on the same color scale
        # and plot the beam pattern contours on the map

        c = ax.tricontour(nx,
                          ny,
                          nz,
                          levels=levels,
                          cmap=cmap,
                          vmin=levels.min(),
                          vmax=levels.max(),
                          zorder=1.3)

    # Make a figure color scale based on the contour sets (which all have the same max/min values)
    fig.colorbar(c, fraction=0.02, pad=0.03, label="Zenith normalised power")

    # Plot the target positions, using Astropy to wrap at correct RA
    target_coords = read_data(targetfile)
    wrapped_target_ra = -target_coords.ra.wrap_at(
        180 * u.deg).rad  # negative because RA increases to the West
    wrapped_target_dec = target_coords.dec.wrap_at(180 * u.deg).rad
    #print "Plotting target source positions..."
    logger.info("Plotting target source positions...")
    ax.scatter(wrapped_target_ra,
               wrapped_target_dec,
               10,
               marker="x",
               color="red",
               zorder=1.6,
               label="Target sources")

    xtick_labels = [
        "10h", "8h", "6h", "4h", "2h", "0h", "22h", "20h", "18h", "16h", "14h"
    ]
    ax.set_xticklabels(xtick_labels, color="0.2")
    ax.set_xlabel("Right Ascension")
    ax.set_ylabel("Declination")
    ax.grid(True, color="k", lw=0.5, ls=":")

    # Place upper-right corner of legend at specified Axis coordinates
    ax.legend(loc="upper right",
              bbox_to_anchor=(1.02, 0.08),
              numpoints=1,
              borderaxespad=0.,
              fontsize=6)

    plt.savefig(oname, format="eps", bbox_inches="tight")
    print("Array phase:        {}".format(array_phase))
    print("RA Pointing (deg):  {:6.2f}".format(
        data_dict["metadata"]["ra_pointing"]))
    print("DEC Pointing (deg): {:6.2f}".format(
        data_dict["metadata"]["dec_pointing"]))
    print("Centrefreq (MHz):   {}".format(centre_freq))
    print("Channels:           {}".format(
        data_dict["rfstreams"]["0"]["frequencies"]))
    if array_phase != 'OTH':
        print("~FWHM (arcminute):  {:4.2f}".format(
            calc_ta_fwhm(centre_freq, array_phase=array_phase) * 60.))
    print("FoV (square deg):   {:5.1f}".format(fov))

    # Perform file metadata calls
    files_meta_data = getmeta(servicetype='metadata',
                              service='data_files',
                              params={'obs_id': str(args.obsid)})
    start, stop = obs_max_min(args.obsid, files_meta_data=files_meta_data)
    # Check available files
    available_files, all_files = files_available(
        args.obsid, files_meta_data=files_meta_data)
    # Split into raw and combined files to give a clearer idea of files available
    available_comb = []
    available_raw = []
    available_ics = []
    all_comb = []
    all_raw = []
    all_ics = []
    for file_name in all_files:
        if 'combined' in file_name:
            all_comb.append(file_name)
def find_sources_in_obs(obsid_list,
                        names_ra_dec,
                        obs_for_source=False,
                        dt_input=300,
                        beam='analytic',
                        min_z_power=0.3,
                        cal_check=False,
                        all_volt=False,
                        degrees_check=False,
                        metadata_list=None):
    """Either creates text files for each MWA obs ID of each source within it or a text
    file for each source with each MWA obs is that the source is in.

    Parameters
    ----------
    obsid_list : `list`
        List of MWA observation IDs.
    names_ra_dec : `list`
        An array in the format [[source_name, RAJ, DecJ]]
    obs_for_source : `boolean`, optional
        If `True` creates a text file for each source with each MWA observation that the source is in.
        If `False` creates text files for each MWA obs ID of each source within it. |br| Default: `False`.
    dt_input : `int`, optional
        The time interval in seconds of how often powers are calculated. |br| Default: 300.
    beam : `str`, optional
        The primary beam model to use out of [analytic, advanced, full_EE]. |br| Default: analytic.
    min_z_power : `float`, optional
        Zenith normalised power cut off. |br| Default: 0.3.
    cal_check : `boolean`, optional
        Check the MWA pulsar database for a calibration suitable for the observation ID. |br| Default: `False`.
    all_volt : `boolean`, optional
        Include observations with missing or incorrect voltage files. |br| Default: `False`.
    degrees_check : `boolean`, optional
        If `True`, assumes RAJ and DecJ are in degrees. |br| Default: `False`.
    metadata_list : `list`
        List of the outputs of vcstools.metadb_utils.get_common_obs_metadata.
        If not provided, will make the metadata calls to find the data. |br| Default: `None`.

    Returns
    -------
    output_data : `dict`
        The format of output_data is dependent on obs_for_source.
        |br| If obs_for_source is `True` :
        |br|    output_data = {jname:[[obsid, duration, enter, exit, max_power],
        |br|                          [obsid, duration, enter, exit, max_power]]}
        |br| If obs_for_source is `False` :
        |br|    output_data = {obsid:[[jname, enter, exit, max_power],
        |br|                         [jname, enter, exit, max_power]]}
    common_metadata_list : `list`
        A list of the output of get_common_obs_metadata for each obs ID.
    """
    import urllib.error
    # Prepare metadata calls and calculate powers
    powers = []
    #powers[obsid][source][time][freq]
    common_metadata_list = []
    obsid_to_remove = []

    # Loop over observations to check if there are VCS files
    for i, obsid in enumerate(obsid_list):
        # Perform the file metadata call over only 10 seconds as that is a sufficient test
        try:
            files_meta_data = getmeta(service='data_files',
                                      params={
                                          'obs_id': obsid,
                                          'nocache': 1,
                                          'mintime': int(obsid) + 10,
                                          'maxtime': int(obsid) + 20
                                      })
        except urllib.error.HTTPError:
            files_meta_data = None
        if files_meta_data is None:
            logger.warning(
                "No file metadata found for obsid {}. Skipping".format(
                    obsid))
            obsid_to_remove.append(obsid)
            continue

        # Check raw voltage files
        raw_available = False
        raw_deleted = False
        for file_name in files_meta_data.keys():
            if file_name.endswith('dat'):
                deleted = files_meta_data[file_name]['deleted']
                if deleted:
                    raw_deleted = True
                else:
                    raw_available = True

        # Check combined voltage tar files
        comb_available = False
        comb_deleted = False
        for file_name in files_meta_data.keys():
            if file_name.endswith('tar'):
                deleted = files_meta_data[file_name]['deleted']
                if deleted:
                    comb_deleted = True
                else:
                    comb_available = True

        if raw_available or comb_available or all_volt:
            if metadata_list:
                common_metadata, _ = metadata_list[i]
            else:
                # No metadata supplied so make the metadata call
                common_metadata = get_common_obs_metadata(obsid)
            common_metadata_list.append(common_metadata)
        elif raw_deleted and comb_deleted:
            logger.warning(
                'Raw and combined voltage files deleted for {}'.format(obsid))
            obsid_to_remove.append(obsid)
        elif raw_deleted:
            logger.warning('Raw voltage files deleted for {}'.format(obsid))
            obsid_to_remove.append(obsid)
        elif comb_deleted:
            logger.warning(
                'Combined voltage files deleted for {}'.format(obsid))
            obsid_to_remove.append(obsid)
        else:
            logger.warning(
                'No raw or combined voltage files for {}'.format(obsid))
            obsid_to_remove.append(obsid)
    for otr in obsid_to_remove:
        obsid_list.remove(otr)

    # Calculate the power for all sources and obsids and find when they enter and exit the beam
    beam_coverage = source_beam_coverage(
        obsid_list,
        names_ra_dec,
        common_metadata_list=common_metadata_list,
        dt_input=dt_input,
        beam=beam,
        min_z_power=min_z_power)

    #chooses whether to list the source in each obs or the obs for each source
    output_data = {}
    if obs_for_source:
        for source_name in np.array(names_ra_dec)[:, 0]:
            source_data = []
            for on, obsid in enumerate(obsid_list):
                if source_name in beam_coverage[obsid].keys():
                    # Source was in the beam so include it
                    _, _, _, duration, _, centre_freq, channels = common_metadata_list[
                        on]
                    bandwidth = len(channels) * 1.28  # MHz; each coarse channel is 1.28 MHz wide
                    enter_beam_norm, exit_beam_norm, max_power = beam_coverage[
                        obsid][source_name]
                    source_data.append([
                        obsid, duration, enter_beam_norm, exit_beam_norm,
                        max_power, centre_freq, bandwidth
                    ])
            # For each source make a dictionary key that contains a list of
            # lists of the data for each obsid
            output_data[source_name] = source_data

    else:
        # output a list of sources for each obs
        for on, obsid in enumerate(obsid_list):
            _, _, _, duration, _, centre_freq, channels = common_metadata_list[
                on]
            obsid_data = []
            for source_name in np.array(names_ra_dec)[:, 0]:
                if source_name in beam_coverage[obsid].keys():
                    enter_beam_norm, exit_beam_norm, max_power = beam_coverage[
                        obsid][source_name]
                    obsid_data.append([
                        source_name, enter_beam_norm, exit_beam_norm, max_power
                    ])
            # For each obsid make a dictionary key that contains a list of
            # lists of the data for each source/pulsar
            output_data[obsid] = obsid_data

    return output_data, common_metadata_list
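A minimal usage sketch (the obs ID and source position are hypothetical; RAJ/DecJ are sexagesimal strings unless degrees_check is set):

names_ra_dec = [["J0437-4715", "04:37:15.8", "-47:15:09.1"]]  # hypothetical target
output_data, common_metadata_list = find_sources_in_obs(
    [1221399680], names_ra_dec, obs_for_source=True, min_z_power=0.3)
# output_data["J0437-4715"] -> [[obsid, duration, enter, exit, max_power, centre_freq, bandwidth], ...]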