# Example 1
def can_add_file(fn, verbose=False):
    """Checks a file to see if it should be added to the 'files'
        table in the jobtracker DB.

        Input:
            fn: The file to check.
            verbose: Print messages to stdout. (Default: be silent).

        Outputs:
            can_add: Boolean value. True if the file should be added.
                    False otherwise.
    """
    import datafile
    try:
        datafile_type = datafile.get_datafile_type([fn])
    except datafile.DataFileError:
        # The filename does not match any known data-file type, so the
        # file cannot be tracked.
        if verbose:
            print("Unrecognized data file type: %s" % fn)
        return False
    # The file type was recognized. The original fell off the end here,
    # implicitly returning None (falsy) and contradicting the documented
    # boolean contract; return True explicitly.
    return True
# Example 2
def copy_zaplist(fns, workdir):
    """Copy the most specific applicable zaplist into *workdir*.

    Custom zaplists are searched for inside the zaplist tarball, most
    specific first (per data file, per beam, per MJD). If none matches,
    the configured default zaplist is copied instead.
    """
    filetype = datafile.get_datafile_type(fns)
    info = filetype.fnmatch(fns[0]).groupdict()
    if 'date' not in info:
        # Derive an observation date from the MJD when the filename
        # pattern did not capture one.
        info['date'] = "%04d%02d%02d" % \
                            astro_utils.calendar.MJD_to_date(int(info['mjd']))

    # Candidate names, most specific first: this data file, this beam,
    # then anything from the same project/date.
    candidates = [
        fns[0].replace(".fits", ".zaplist"),
        "%s.%s.b%s.zaplist" % (info['projid'], info['date'], info['beam']),
        "%s.%s.all.zaplist" % (info['projid'], info['date']),
    ]

    tarpath = os.path.join(config.processing.zaplistdir, "zaplists.tar.gz")
    archive = tarfile.open(tarpath, mode='r')
    entries = archive.getmembers()

    copied = False
    for candidate in candidates:
        hits = [entry for entry in entries
                if entry.name.endswith(candidate)]
        if not hits:
            # No archive member matches this candidate; try the next one.
            continue
        # Use the first matching archive entry and write it to workdir.
        localfn = os.path.join(workdir, candidate)
        out = open(localfn, 'w')
        out.write(archive.extractfile(hits[0]).read())
        out.close()
        print("Copied custom zaplist: %s" % candidate)
        copied = True
        break

    if not copied:
        # Fall back to the configured default zaplist.
        shutil.copy(config.processing.default_zaplist, workdir)
        print("No custom zaplist found. Copied default zaplist: %s" % \
                config.processing.default_zaplist)

    archive.close()
# Example 3
def can_add_file(fn, verbose=False):
    """Checks a file to see if it should be added to the 'files'
        table in the jobtracker DB.

        Input:
            fn: The file to check.
            verbose: Print messages to stdout. (Default: be silent).

        Outputs:
            can_add: Boolean value. True if the file should be added.
                    False otherwise.
    """
    # NOTE: an unused local "import jobtracker" was removed here.
    import datafile
    try:
        datafile_type = datafile.get_datafile_type([fn])
    except datafile.DataFileError:
        # The filename does not match any known data-file type, so the
        # file cannot be tracked.
        if verbose:
            print("Unrecognized data file type: %s" % fn)
        return False
    # The file type was recognized. The original fell off the end here,
    # implicitly returning None (falsy) and contradicting the documented
    # boolean contract; return True explicitly.
    return True
# Example 4
def copy_zaplist(fns, workdir):
    """Copy the default zaplist for this data-file type into *workdir*.

        Inputs:
            fns: List of data file names; only the first is examined.
            workdir: Directory the zaplist is copied into.

        Outputs:
            None

        Raises:
            ValueError: If no default zaplist is configured for the
                detected data-file type.
    """
    filetype = datafile.get_datafile_type(fns)
    # Parse the filename against the type's pattern. The parsed fields
    # are no longer used (the custom-zaplist lookup was disabled), but
    # the call is kept because it presumably raises on a filename that
    # does not match the expected pattern -- TODO confirm and drop if not.
    filetype.fnmatch(fns[0]).groupdict()

    # Removed: a dead "if 1:" wrapper and large blocks of commented-out
    # custom-zaplist lookup code. Only the default-zaplist path remains.
    # Select the default zaplist for this backend's data-file type.
    if filetype == datafile.WappPsrfitsData:
        zapfn = config.processing.default_wapp_zaplist
    elif (filetype == datafile.MockPsrfitsData
          or filetype == datafile.MergedMockPsrfitsData):
        zapfn = config.processing.default_mock_zaplist
    elif filetype == datafile.NuppiPsrfitsData:
        zapfn = config.processing.default_span512_zaplist
    elif filetype == datafile.PFFTSPsrfitsData:
        zapfn = config.processing.default_pffts_zaplist
    else:
        raise ValueError("No default zaplist for data files of type %s" % \
                            filetype.__name__)
    shutil.copy(zapfn, workdir)
    print("No custom zaplist found. Copied default zaplist: %s" % \
            zapfn)
# Example 5
def create_file_entries(request):
    """Given a row from the requests table in the job-tracker DB
        check the FTP server for its files and create entries in
        the files table.

        Input:
            request: A row from the requests table.
        Outputs:
            None
    """
    cftp = CornellFTP.CornellFTP()
    try:
        files = cftp.get_files(request['guid'])
    except CornellFTP.M2Crypto.ftpslib.error_perm:
        # Permission error from the FTPS listing: log it and continue
        # with an empty file list so the request is marked failed below.
        exctype, excvalue, exctb = sys.exc_info()
        dlm_cout.outs("FTP error getting file information.\n" \
                        "\tGUID: %s\n\tError: %s" % \
                        (request['guid'], \
                        "".join(traceback.format_exception_only(exctype, excvalue)).strip()))
        files = []
    
    total_size = 0
    num_files = 0
    queries = []
    for fn, size in files:
        # Check if file is from the phantom beam (beam 7)
        datafile_type = datafile.get_datafile_type([fn])
        parsedfn = datafile_type.fnmatch(fn)
        # '-1' stands in for filenames whose pattern has no 'beam' field.
        if parsedfn.groupdict().setdefault('beam', '-1') == '7':
            print "Ignoring beam 7 data: %s" % fn
            continue

        # Insert entry into DB's files table
        # NOTE(review): queries are assembled with %-interpolation rather
        # than parameterized placeholders; values come from the FTP
        # listing. If jobtracker.query supports parameters, switching
        # would be safer -- TODO confirm its API.
        queries.append("INSERT INTO files ( " \
                            "request_id, " \
                            "remote_filename, " \
                            "filename, " \
                            "status, " \
                            "created_at, " \
                            "updated_at, " \
                            "size) " \
                       "VALUES ('%s', '%s', '%s', '%s', '%s', '%s', %d)" % \
                       (request['id'], fn, os.path.join(config.download.datadir, fn), \
                        'new', jobtracker.nowstr(), jobtracker.nowstr(), size))
        total_size += size
        num_files += 1

    if num_files:
        # At least one downloadable file: mark the request as filled.
        dlm_cout.outs("Request (GUID: %s) has succeeded.\n" \
                        "\tNumber of files to be downloaded: %d" % \
                        (request['guid'], num_files))
        queries.append("UPDATE requests " \
                       "SET size=%d, " \
                            "updated_at='%s', " \
                            "status='finished', " \
                            "details='Request has been filled' " \
                       "WHERE id=%d" % \
                       (total_size, jobtracker.nowstr(), request['id']))
    else:
        # No files (listing failed or everything was beam 7): fail it.
        dlm_cout.outs("Request (GUID: %s) has failed.\n" \
                        "\tThere are no files to be downloaded." % \
                        request['guid'])
        queries.append("UPDATE requests " \
                       "SET updated_at='%s', " \
                            "status='failed', " \
                            "details='No files to download' " \
                       "WHERE id=%d" % \
                       (jobtracker.nowstr(), request['id']))
    # Run all INSERTs and the final UPDATE in one batch.
    jobtracker.query(queries)
# Example 6
def copy_zaplist(fns, workdir):
    # Copy the most specific applicable zaplist (and, when present, its
    # companion radar-samples list) into workdir. Candidates are tried
    # most specific first; if none matches in the tarball, a per-type
    # default zaplist is copied instead.
    filetype = datafile.get_datafile_type(fns)
    parsed = filetype.fnmatch(fns[0]).groupdict()
    if 'date' not in parsed.keys():
        # Derive the observation date from the MJD when the filename
        # pattern did not capture one.
        parsed['date'] = "%04d%02d%02d" % \
                            astro_utils.calendar.MJD_to_date(int(parsed['mjd']))

    customzapfns = []
    # First, try to find a custom zaplist for this specific data file
    customzapfns.append(fns[0].replace(".fits", ".zaplist"))
    # Next, try to find custom zaplist for this beam
    customzapfns.append("%s.%s.b%s.zaplist" % \
                        (parsed['projid'], parsed['date'], parsed['beam']))
    # Try to find custom zaplist for this MJD
    customzapfns.append("%s.%s.all.zaplist" %
                        (parsed['projid'], parsed['date']))

    # no_check=True: presumably skips freshness/download validation of
    # the tarball -- TODO confirm against pipeline_utils.
    zaptar_fn = pipeline_utils.get_zaplist_tarball(no_check=True)
    zaptar = tarfile.open(zaptar_fn, mode='r')

    members = zaptar.getmembers()
    for customzapfn in customzapfns:
        # A matching radar-samples list shares the zaplist's stem.
        radar_samples_fn = os.path.splitext(
            customzapfn)[0] + '_merged_radar_samples.txt'
        matches = [mem for mem in members \
                    if mem.name.endswith(customzapfn)]
        radar_matches = [mem for mem in members \
                          if mem.name.endswith(radar_samples_fn)]
        if matches:
            ti = matches[0]  # The first TarInfo object found
            # that matches the file name

            # Write custom zaplist to workdir
            localfn = os.path.join(workdir, customzapfn)
            f = open(localfn, 'w')
            f.write(zaptar.extractfile(ti).read())
            f.close()
            print "Copied custom zaplist: %s" % customzapfn

            if radar_matches:
                radar_ti = radar_matches[0]

                # Write radar samples list to workdir
                radar_localfn = os.path.join(workdir, radar_samples_fn)
                f = open(radar_localfn, 'w')
                f.write(zaptar.extractfile(radar_ti).read())
                f.close()
                print "Copied radar samples list: %s" % radar_samples_fn

            break
        else:
            # The member we searched for doesn't exist, try next one
            pass
    else:
        # for/else: runs only when no candidate matched (no break).
        # Copy default zaplist
        if filetype == datafile.WappPsrfitsData:
            zapfn = config.processing.default_wapp_zaplist
        elif (filetype == datafile.MockPsrfitsData
              or filetype == datafile.MergedMockPsrfitsData):
            zapfn = config.processing.default_mock_zaplist
        else:
            raise ValueError("No default zaplist for data files of type %s" % \
                                filetype.__name__)
        shutil.copy(zapfn, workdir)
        print "No custom zaplist found. Copied default zaplist: %s" % \
                zapfn

    zaptar.close()
# Example 7
def create_file_entries(request):
    """Given a row from the requests table in the job-tracker DB
        check the FTP server for its files and create entries in
        the files table.

        Input:
            request: A row from the requests table.
        Outputs:
            None
    """
    cftp = CornellFTP.CornellFTP()
    try:
        files = cftp.get_files(request['guid'])
    except CornellFTP.M2Crypto.ftpslib.error_perm:
        # Permission error from the FTPS listing: log it and continue
        # with an empty file list so the request is marked failed below.
        exctype, excvalue, exctb = sys.exc_info()
        dlm_cout.outs("FTP error getting file information.\n" \
                        "\tGUID: %s\n\tError: %s" % \
                        (request['guid'], \
                        "".join(traceback.format_exception_only(exctype, excvalue)).strip()))
        files = []

    total_size = 0
    num_files = 0
    queries = []
    for fn, size in files:
        # Check if file is from the phantom beam (beam 7)
        datafile_type = datafile.get_datafile_type([fn])
        parsedfn = datafile_type.fnmatch(fn)
        # '-1' stands in for filenames whose pattern has no 'beam' field.
        if parsedfn.groupdict().setdefault('beam', '-1') == '7':
            print "Ignoring beam 7 data: %s" % fn
            continue

        # Insert entry into DB's files table
        # Local path is placed under config.download.temp -- presumably
        # a staging directory for in-progress downloads; verify against
        # the downloader. NOTE(review): queries use %-interpolation, not
        # parameterized placeholders; safer if jobtracker.query supports
        # parameters -- TODO confirm its API.
        queries.append("INSERT INTO files ( " \
                            "request_id, " \
                            "remote_filename, " \
                            "filename, " \
                            "status, " \
                            "created_at, " \
                            "updated_at, " \
                            "size) " \
                       "VALUES ('%s', '%s', '%s', '%s', '%s', '%s', %d)" % \
                       (request['id'], fn, os.path.join(config.download.temp, fn), \
                        'new', jobtracker.nowstr(), jobtracker.nowstr(), size))
        total_size += size
        num_files += 1

    if num_files:
        # At least one downloadable file: mark the request as filled.
        dlm_cout.outs("Request (GUID: %s) has succeeded.\n" \
                        "\tNumber of files to be downloaded: %d" % \
                        (request['guid'], num_files))
        queries.append("UPDATE requests " \
                       "SET size=%d, " \
                            "updated_at='%s', " \
                            "status='finished', " \
                            "details='Request has been filled' " \
                       "WHERE id=%d" % \
                       (total_size, jobtracker.nowstr(), request['id']))
    else:
        # No files (listing failed or everything was beam 7): fail it.
        dlm_cout.outs("Request (GUID: %s) has failed.\n" \
                        "\tThere are no files to be downloaded." % \
                        request['guid'])
        queries.append("UPDATE requests " \
                       "SET updated_at='%s', " \
                            "status='failed', " \
                            "details='No files to download' " \
                       "WHERE id=%d" % \
                       (jobtracker.nowstr(), request['id']))
    # Run all INSERTs and the final UPDATE in one batch.
    jobtracker.query(queries)