Example #1
0
def get_files(psrname):
    """Get a list of database rows for the given pulsar.

        Inputs:
            psrname: The name of the pulsar to match.

        Outputs:
            rows: A list of rows containing file and obs
                information for each matching file.
    """
    db = database.Database()
    # Normalise the pulsar name to its preferred form before matching.
    # (Leftover debug prints of the name before/after were removed.)
    psrname = utils.get_prefname(psrname)

    # Match cleaned files from pulsar observations of the given source
    whereclause = (db.obs.c.sourcename == psrname) & \
                  (db.files.c.stage == 'cleaned') & \
                  (db.obs.c.obstype == 'pulsar')
    with db.transaction() as conn:
        select = db.select([db.files,
                            db.obs.c.sourcename,
                            db.obs.c.start_mjd,
                            db.obs.c.rcvr],
                    from_obj=[db.files.\
                            outerjoin(db.obs,
                                onclause=(db.files.c.obs_id ==
                                            db.obs.c.obs_id))]).\
                        where(whereclause).\
                        order_by(db.files.c.added.asc())

        result = conn.execute(select)
        rows = result.fetchall()
        result.close()
    return rows
Example #2
0
def get_files(psrnames, retry=False):
    """Get a list of data base rows containing
        file and obs information for the given pulsar.

        Inputs:
            psrnames: The names of the pulsar to match.
            retry: Only get files to retry calibration on.
                (Default: get all file matching psrnames)

        Outputs:
            rows: A list of rows containing file and obs
                information for each matching file.
    """
    db = database.Database()

    # OR together a 'like' match for each preferred pulsar name
    psrname = utils.get_prefname(psrnames[0])
    whereclause = (db.obs.c.sourcename.like(psrname))
    for psrname in psrnames[1:]:
        psrname = utils.get_prefname(psrname)
        whereclause |= (db.obs.c.sourcename.like(psrname))

    if retry:
        # Restrict to cleaned, QC-passed pulsar files that are 'done'
        # or previously failed calibration and have no cal file yet
        whereclause &= (db.files.c.stage=='cleaned') & \
                       (db.files.c.status.in_(['calfail', 'done'])) & \
                       (db.files.c.qcpassed) & \
                       (db.obs.c.obstype=='pulsar') & \
                       (db.files.c.cal_file_id == None)

    with db.transaction() as conn:
        select = db.select([db.files,
                            db.obs.c.dir_id,
                            db.obs.c.sourcename,
                            db.obs.c.obstype,
                            db.obs.c.start_mjd,
                            db.obs.c.rcvr],
                    from_obj=[db.files.\
                        outerjoin(db.obs,
                            onclause=(db.files.c.obs_id ==
                                        db.obs.c.obs_id))]).\
                    where(whereclause).\
                    order_by(db.files.c.added.asc())
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()

    # Only keep the most recently added file for each observation.
    # Rows are sorted oldest-first by the query above, so walk the
    # list backwards, keeping the first (i.e. newest) file seen per
    # obs_id. Use a set for O(1) membership tests (was an O(n) list
    # scan per row, i.e. O(n^2) overall).
    seen_obs_ids = set()
    for ii in reversed(range(len(rows))):
        if rows[ii]['obs_id'] in seen_obs_ids:
            rows.pop(ii)
        else:
            seen_obs_ids.add(rows[ii]['obs_id'])
    return rows
Example #3
0
def get_dir_from_id(dir_id):
    """Return the path of the directory entry with the given ID.

        Raises ValueError unless exactly one matching row exists.
    """
    db = database.Database()
    with db.transaction() as conn:
        query = db.select([db.directories]).\
                    where(db.directories.c.dir_id==dir_id)
        res = conn.execute(query)
        matches = res.fetchall()
        res.close()
    nmatches = len(matches)
    if nmatches != 1:
        raise ValueError("Bad number (%d) of rows for Directory ID = %d!" % 
                         (nmatches, dir_id))
    return matches[0]['path']
Example #4
0
def get_id_from_dir(path):
    """Return the dir_id of the directory entry matching the given path.

        The path is made absolute before matching. Raises ValueError
        unless exactly one matching row exists.
    """
    path = os.path.abspath(path)
    db = database.Database()
    with db.transaction() as conn:
        query = db.select([db.directories]).\
                    where(db.directories.c.path==path)
        res = conn.execute(query)
        matches = res.fetchall()
        res.close()
    nmatches = len(matches)
    if nmatches != 1:
        raise ValueError("Bad number (%d) of rows for Directory = %s!" % 
                         (nmatches, path))
    return matches[0]['dir_id']
Example #5
0
def main():
    """Print a formatted line for each matching file and, when
        args.retry is set, mark eligible files for calibration retry.

        NOTE(review): relies on module-level 'args' and on helpers
        (get_files, retry, reduce_data, calibrate) defined elsewhere.
    """
    rows = get_files(args.psrnames, retry=args.retry)
    info = {}  # NOTE(review): never populated or read below - appears unused

    # Unique source names; used by the per-pulsar re-attempt pass at the end
    psrnameset = set([row['sourcename'] for row in rows])
    utils.sort_by_keys(rows, args.sortkeys)
    db = database.Database()
    with db.transaction() as conn:
        for row in rows:
            if row['obstype'] == 'pulsar':
                calscans = reduce_data.get_potential_polcal_scans(
                    db, row['obs_id'])
                cancal = bool(calscans)
            # NOTE(review): 'cancal'/'calscans' are only (re)assigned for
            # 'pulsar' rows; for other rows the values from a previous
            # iteration are reused (and a NameError is raised if the very
            # first row is not a pulsar while args.retry is set). Confirm
            # this is intended.
            sys.stdout.write(args.fmt.decode('string-escape') % row)
            if row['obstype'] == 'pulsar':
                sys.stdout.write("\t%s\n" % cancal)
                utils.print_info(
                    "Number of potential calibrator scans: %d" % len(calscans),
                    1)
                msg = "    %s" % "\n    ".join([
                    "Obs ID: %d; File ID: %d; %s" %
                    (calrow['obs_id'], calrow['file_id'], calrow['filename'])
                    for calrow in calscans if type(calrow) is not str
                ])
                utils.print_info(msg, 2)
            else:
                sys.stdout.write("\n")
            if args.retry:
                # Never retry a file whose calibration already failed outright
                for desc in reduce_data.get_all_descendents(
                        row['file_id'], db):
                    if (desc['status'] == 'failed') and (desc['stage']
                                                         == 'calibrated'):
                        # File has been calibrated, but it failed. Do not retry.
                        cancal = False
                        utils.print_info(
                            "Calibration of file %d has previously failed. Will _not_ retry."
                            % row['file_id'], 1)
                if (cancal and (row['status'] != 'failed')) or (
                        not cancal and (row['status'] == 'calfail')):
                    retry(db, row['file_id'])
                    utils.print_info(
                        "Will retry calibration of file %d" % row['file_id'],
                        1)
        if args.retry:
            # Re-attempt calibration and refresh the caldb for each source.
            # NOTE(review): the bare 'except: pass' silently swallows every
            # error here (including KeyboardInterrupt) - consider logging.
            for name in psrnameset:
                try:
                    reduce_data.reattempt_calibration(db, name)
                    calibrate.update_caldb(db, name, force=True)
                except:
                    pass
Example #6
0
def get_current_files(psrnames, rcvr=None):
    """Fetch file and obs rows for the 'current' file of every
        observation of the given pulsars, optionally restricted
        to a single receiver.

        Inputs:
            psrnames: The names of the pulsar to match.
            rcvr: The name of the receiver to match.
                (Default: Match all)

        Outputs:
            rows: A list of rows containing file and obs
                information for each matching file.
    """
    db = database.Database()

    # OR together an equality test for each preferred pulsar name
    prefnames = [utils.get_prefname(name) for name in psrnames]
    whereclause = (db.obs.c.sourcename == prefnames[0])
    for name in prefnames[1:]:
        whereclause |= (db.obs.c.sourcename == name)

    # Optionally restrict to one receiver
    if rcvr is not None:
        whereclause &= (db.obs.c.rcvr == rcvr)

    with db.transaction() as conn:
        columns = [db.files,
                   db.obs.c.dir_id,
                   db.obs.c.sourcename,
                   db.obs.c.obstype,
                   db.obs.c.start_mjd,
                   db.obs.c.length,
                   db.obs.c.bw,
                   db.obs.c.freq,
                   db.obs.c.nsubints,
                   db.obs.c.nsubbands,
                   db.obs.c.obsband,
                   db.obs.c.rcvr]
        # Join each observation to its designated 'current' file
        joined = db.obs.outerjoin(
            db.files,
            onclause=(db.files.c.file_id == db.obs.c.current_file_id))
        select = db.select(columns, from_obj=[joined]).\
                    where(whereclause).\
                    order_by(db.files.c.added.asc())
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()
    return rows
Example #7
0
    def __init__(self, priorities=None, stage='cleaned', re_eval=False):
        """Set up the quality-control window and its initial state.

            Inputs:
                priorities: Priorities used when selecting files.
                    (Default: None)
                stage: Processing stage of files to review.
                    (Default: 'cleaned')
                re_eval: Whether to re-evaluate files.
                    (Default: False)
        """
        super(QualityControl, self).__init__()
        # Set up the window
        self.__setup()
        self.__add_widgets()
        self.__set_keyboard_shortcuts()

        # Establish a database object
        self.db = database.Database()

        # Initialize
        self.priorities = priorities
        self.stage = stage
        self.re_eval = re_eval
        self.idiag = 0          # index of the currently displayed diagnostic
        self.file_id = None     # ID of the file currently under review
        self.diagplots = []     # diagnostic plots for the current file
Example #8
0
def main():
    print ""
    print "        calibrate.py"
    print "     Patrick  Lazarus"
    print ""
    
    if len(args.files):
        print "Number of input files: %d" % len(args.files)
    else:
        raise errors.InputError("No files to calibrate!")

    if args.caldb is None:
        # Prepare to fetch caldb info from the pipeline database
        db = database.Database()
    else:
        caldb = args.caldb

    for fn in args.files:
        if args.caldb is None: 
            arf = utils.ArchiveFile(fn)
            caldb = update_caldb(db, arf['name'], force=True)
        calfn = calibrate(fn, caldb)
Example #9
0
def get_files_by_id(file_ids):
    """Get a list of data base rows containing
        file and obs information for the given,
        file IDs.

        Inputs:
            file_ids: A list of file IDs to match

        Outputs:
            rows: A list of rows containing file and obs
                information for each matching file.
    """
    db = database.Database()

    with db.transaction() as conn:
        select = db.select([db.files,
                            db.obs.c.dir_id,
                            db.obs.c.sourcename,
                            db.obs.c.obstype,
                            db.obs.c.start_mjd,
                            db.obs.c.length,
                            db.obs.c.bw,
                            db.obs.c.freq,
                            db.obs.c.nsubints,
                            db.obs.c.nsubbands,
                            db.obs.c.obsband,
                            db.obs.c.rcvr],
                    from_obj=[db.files.\
                        outerjoin(db.obs,
                            onclause=(db.files.c.obs_id ==
                                        db.obs.c.obs_id))]).\
                    where(db.files.c.file_id.in_(file_ids)).\
                    order_by(db.files.c.added.asc())
                    # BUGFIX: match the 'file_ids' parameter
                    # (was the module-level 'args.file_ids')
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()

    return rows
Example #10
0
def main():
    """Compute and store the SNR for every file that does not yet
        have one (skipping deleted files and files still at the
        'grouped' stage), processing the newest files first.
    """
    db = database.Database()
    with db.transaction() as conn:
        select = db.select([db.files]).\
                    where((db.files.c.snr == None) &
                          (db.files.c.is_deleted == False) &
                          (db.files.c.stage != 'grouped')).\
                    order_by(db.files.c.added.desc())
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()
        for row in utils.show_progress(rows, width=50, tot=len(rows)):
            fn = os.path.join(row['filepath'], row['filename'])
            try:
                snr = utils.get_archive_snr(fn)
            # 'except Exception as e' is valid in Python 2.6+ and 3;
            # replaces the Python-2-only comma form.
            except Exception as e:
                # BUGFIX: add separator and newline so consecutive
                # error messages don't run together on stderr
                sys.stderr.write("Error when computing SNR of %s. "
                                 "%s\n" % (fn, str(e)))
            else:
                # Only store the SNR if it was computed successfully
                update = db.files.update().\
                            values(snr=snr).\
                            where(db.files.c.file_id == row['file_id'])
                result = conn.execute(update)
                result.close()
Example #11
0
def main():
    """Reset a single observation: back up its files and remove the
        database entries describing its processing so it can be
        reprocessed from the remaining grouped/combined files.

        NOTE(review): relies on module-level 'args' and on helpers
        (get_obsinfo, get_loginfo, get_fileinfo, get_diaginfo,
        get_qcinfo, get_reattinfo, dump_db_entries) defined elsewhere
        in this script.
    """
    db = database.Database()

    obs_id = args.obs_id
    obsinfo = get_obsinfo(db, obs_id)
    datestr = utils.mjd_to_datetime(obsinfo['start_mjd']).strftime("%Y%m%d")
    # Back-up path components: <source>/<obs-date>/<current-timestamp>
    # (built reversed, then flipped)
    subdirs = [
        datetime.datetime.now().strftime("%Y%m%dT%H:%M:%S"), datestr,
        obsinfo['sourcename']
    ]
    subdirs.reverse()
    backupdir = os.path.join(config.output_location, "removed", *subdirs)
    print "Will remove database entries for obs ID %d" % obs_id
    print "Back-ups of existing files will be copied to %s" % backupdir

    log_ids, logfns = get_loginfo(db, obs_id)
    assert len(log_ids) == len(logfns)
    print "Will also remove %d logs" % len(log_ids)
    tmp = "\n".join(["Log ID: %d; %s" % xx for xx in zip(log_ids, logfns)])
    utils.print_info(tmp, 1)

    rows = get_fileinfo(db, obs_id)
    # Keep 'grouped' files and undeleted 'combined' files; everything
    # else is slated for removal.
    file_ids = [
        row['file_id'] for row in rows if not ((row['stage'] == 'grouped') or (
            (row['stage'] == 'combined') and (not row['is_deleted'])))
    ]
    file_ids_left = [
        row['file_id'] for row in rows if row['file_id'] not in file_ids
    ]
    fns = [
        os.path.join(row['filepath'], row['filename']) for row in rows
        if row['file_id'] in file_ids
    ]
    # NOTE(review): reports len(rows), but only len(file_ids) files are
    # actually removed - confirm which count is intended.
    print "Will also remove %d files" % len(rows)
    tmp = "\n".join(["File ID: %d; %s" % xx for xx in zip(file_ids, fns)])
    utils.print_info(tmp, 1)

    diag_ids, diagfns = get_diaginfo(db, file_ids)
    assert len(diag_ids) == len(diagfns)
    print "Will also remove %d diagnostics" % len(diag_ids)
    tmp = "\n".join(
        ["Diagnostic ID: %d; %s" % xx for xx in zip(diag_ids, diagfns)])
    utils.print_info(tmp, 1)

    qctrl_ids = get_qcinfo(db, file_ids)
    print "Will also remove %d quality control entries" % len(qctrl_ids)
    tmp = "\n".join(["QC ID: %d" % xx for xx in qctrl_ids])
    utils.print_info(tmp, 1)

    reatt_ids = get_reattinfo(db, file_ids)
    print "Will also remove %d re-attempt entries" % len(reatt_ids)
    tmp = "\n".join(["Re-attempt ID: %d" % xx for xx in reatt_ids])
    utils.print_info(tmp, 1)

    # Dump the rows about to be removed so they can be restored by hand
    mysqldumpstr = dump_db_entries(db, obs_id, log_ids, file_ids, diag_ids)
    utils.print_info("MySQL dump:\n%s" % mysqldumpstr, 2)

    if not args.dryrun:
        try:
            # Make back-up directory
            oldumask = os.umask(0007)
            os.makedirs(backupdir)
            os.umask(oldumask)
            # Write mysql dump
            with open(os.path.join(backupdir, "db_entries.sql"), 'w') as ff:
                ff.write(mysqldumpstr)
            # Move files
            for src in fns + logfns + diagfns:
                fn = os.path.basename(src)
                dest = os.path.join(backupdir, fn)
                if os.path.isfile(src):
                    # Make sure file exists (it may have already been deleted)
                    shutil.move(src, dest)
            # Remove entries from the database
            with db.transaction() as conn:
                # Remove diagnostic entries
                delete = db.diagnostics.delete().\
                            where(db.diagnostics.c.diagnostic_id.in_(diag_ids))
                results = conn.execute(delete)
                results.close()
                # Remove any quality control entries in the database
                delete = db.qctrl.delete().\
                            where(db.qctrl.c.qctrl_id.in_(qctrl_ids))
                results = conn.execute(delete)
                results.close()
                # Remove obs' 'current_file_id' entry
                # NOTE(review): 'row' here is the variable leaked from the
                # list comprehensions above (Python 2 scoping), i.e. the
                # last row of 'rows' - presumably 'obs_id' was intended.
                # Confirm before relying on this behaviour.
                update = db.obs.update().\
                            where(db.obs.c.obs_id == row['obs_id']).\
                            values(current_file_id=None)
                results = conn.execute(update)
                results.close()
                # Remove file entries
                # (newest first because of foreign key constraints - parent_file_id column)
                for row in rows:
                    if (row['stage'] == 'grouped') or \
                            ((row['stage'] == 'combined') and (not row['is_deleted'])):
                        # Leave grouped files and undeleted combined files
                        pass
                    else:
                        delete = db.files.delete().\
                                    where(db.files.c.file_id == row['file_id'])
                        results = conn.execute(delete)
                        results.close()
                #
                # Do not delete log entries from the database even though log file was moved
                #
                # Update newest file left to have status new
                update = db.files.update().\
                            where(db.files.c.file_id == max(file_ids_left)).\
                            values(status='new',
                                   note='Data are being reprocessed.',
                                   last_modified=datetime.datetime.now())
                conn.execute(update)

        except:
            # Roll the filesystem changes back before re-raising
            print "Error encountered! Will attempt to un-move files."
            # Try to unmove files
            for src in fns + logfns + diagfns:
                fn = os.path.basename(src)
                dest = os.path.join(backupdir, fn)
                if os.path.isfile(dest) and not os.path.isfile(src):
                    shutil.move(dest, src)
            if os.path.isdir(backupdir):
                try:
                    os.remove(os.path.join(backupdir, "db_entries.sql"))
                    os.rmdir(backupdir)
                except:
                    print "Could not remove back-up dir %s" % backupdir
            raise
        else:
            print "Successfully reseted obs ID: %d" % obs_id
Example #12
0
def main():
    """Delete all database rows (and the on-disk files they reference)
        for a single source: diagnostics, files, logs, obs and
        directories entries.

        NOTE(review): relies on module-level 'args' being set up by the
        script's argument parsing.
    """
    psrname = utils.get_prefname(args.psr)
    print "Will delete database rows (and referenced files) " \
          "for source name %s" % psrname

    db = database.Database()
    with db.transaction() as conn:
        # IDs of all observations of this source
        select = db.select([db.obs.c.obs_id]).\
                        where(db.obs.c.sourcename == psrname)
        results = conn.execute(select)
        obsrows = results.fetchall()
        results.close()

        # Files belonging to those observations, newest file_id first
        # (children are deleted before parents - parent_file_id FK)
        select = db.select([db.files.c.file_id,
                            db.files.c.filepath,
                            db.files.c.filename],
                           from_obj=[db.files.\
                                outerjoin(db.obs,
                                    onclause=db.files.c.obs_id ==
                                          db.obs.c.obs_id)]).\
                    where(db.obs.c.sourcename == psrname).\
                    order_by(db.files.c.file_id.desc())
        results = conn.execute(select)
        filerows = results.fetchall()
        results.close()

        # Log entries for those observations
        select = db.select([db.logs.c.log_id,
                            db.logs.c.logpath,
                            db.logs.c.logname],
                           from_obj=[db.logs.\
                                outerjoin(db.obs,
                                    onclause=db.logs.c.obs_id ==
                                          db.obs.c.obs_id)]).\
                    where(db.obs.c.sourcename == psrname)
        results = conn.execute(select)
        logsrows = results.fetchall()
        results.close()

        # Diagnostics attached to the files of those observations
        select = db.select([db.diagnostics.c.diagnostic_id,
                            db.diagnostics.c.diagnosticpath,
                            db.diagnostics.c.diagnosticname],
                           from_obj=[db.diagnostics.\
                               outerjoin(db.files,
                                    onclause=db.diagnostics.c.file_id ==
                                        db.files.c.file_id).\
                               outerjoin(db.obs,
                                    onclause=db.files.c.obs_id ==
                                        db.obs.c.obs_id)]).\
                    where(db.obs.c.sourcename == psrname)
        results = conn.execute(select)
        diagrows = results.fetchall()
        results.close()

        # Distinct raw-data directories referenced by those observations
        select = db.select([db.obs.c.dir_id]).\
                    where(db.obs.c.sourcename == psrname).\
                    distinct(db.obs.c.dir_id)
        results = conn.execute(select)
        dirrows = results.fetchall()
        results.close()

        print "There are %d entires to be removed from files table" % \
            len(filerows)
        print "There are %d entires to be removed from obs table" % \
            len(obsrows)
        print "There are %d entries to be removed from directories table" % \
            len(dirrows)
        print "There are %d entires to be removed from logs table" % \
            len(logsrows)
        print "There are %d entries to be remove from diagnostics table" % \
            len(diagrows)

        if not args.dryrun:
            # Remove diagnostics entries
            print "Removing diagnostic rows"
            for row in utils.show_progress(diagrows,
                                           width=50,
                                           tot=len(diagrows)):
                diagnostic_id = row['diagnostic_id']
                ff = os.path.join(row['diagnosticpath'], row['diagnosticname'])
                try:
                    os.remove(ff)
                except:
                    # Best-effort removal; the file may already be gone
                    pass
                delete = db.diagnostics.delete().\
                        where(db.diagnostics.c.diagnostic_id == diagnostic_id)
                conn.execute(delete)
                # NOTE(review): this closes the stale 'results' object from
                # the selects above, not the delete's (discarded) result -
                # presumably a copy/paste slip; confirm it is harmless.
                results.close()

            # Remove files entries
            print "Removing file rows"
            for row in utils.show_progress(filerows,
                                           width=50,
                                           tot=len(filerows)):
                file_id = row['file_id']
                ff = os.path.join(row['filepath'], row['filename'])
                try:
                    os.remove(ff)
                except:
                    # Best-effort removal; the file may already be gone
                    pass
                delete = db.files.delete().\
                        where(db.files.c.file_id == file_id)
                conn.execute(delete)
                # NOTE(review): stale 'results' closed again - see note above
                results.close()

            # Remove logs entries
            print "Removing log rows"
            for row in utils.show_progress(logsrows,
                                           width=50,
                                           tot=len(logsrows)):
                log_id = row['log_id']
                ff = os.path.join(row['logpath'], row['logname'])
                try:
                    os.remove(ff)
                except:
                    # Best-effort removal; the file may already be gone
                    pass
                delete = db.logs.delete().\
                        where(db.logs.c.log_id == log_id)
                conn.execute(delete)
                # NOTE(review): stale 'results' closed again - see note above
                results.close()

            # Remove obs entries
            print "Removing obs rows"
            for row in utils.show_progress(obsrows, width=50,
                                           tot=len(obsrows)):
                obs_id = row['obs_id']
                delete = db.obs.delete().\
                        where(db.obs.c.obs_id == obs_id)
                conn.execute(delete)
                # NOTE(review): stale 'results' closed again - see note above
                results.close()

            # Remove directories entries
            print "Removing directories rows"
            for row in utils.show_progress(dirrows, width=50,
                                           tot=len(dirrows)):
                dir_id = row['dir_id']
                delete = db.directories.delete().\
                        where(db.directories.c.dir_id == dir_id)
                conn.execute(delete)
                # NOTE(review): stale 'results' closed again - see note above
                results.close()
Example #13
0
def main():
    if args.dir_id is not None:
        # Get directory path from database
        dir_toremove = get_dir_from_id(args.dir_id)
        dir_id = arg.dir_id
    else:
        dir_toremove = os.path.join(config.base_rawdata_dir, args.dir)
        dir_id = get_id_from_dir(args.dir)
    if not dir_toremove.startswith(config.base_rawdata_dir):
        raise ValueError("Directory to remove (%s) is not in the raw "
                         "data directory (%s)" % 
                         (dir_toremove, config.base_rawdata_dir))

    subdirs = [datetime.datetime.now().strftime("%Y%m%dT%H:%M:%S")]
    tmp = dir_toremove
    while tmp and (os.path.abspath(config.base_rawdata_dir) != os.path.abspath(tmp)):
        tmp, tmp2 = os.path.split(tmp)
        subdirs.append(tmp2)
    subdirs.reverse()
    backupdir = os.path.join(config.output_location, "removed", *subdirs)
    print "Will remove database entries for data in %s" % dir_toremove
    print "Back-ups of existing files will be copied to %s" % backupdir
    
    db = database.Database()

    obs_ids = get_obsinfo(db, dir_id)
    print "Will also remove %d observations" % len(obs_ids)
    tmp = ", ".join(["%d" % xx for xx in obs_ids])
    utils.print_info("Obs IDs: %s" % tmp, 1)

    log_ids, logfns = get_loginfo(db, obs_ids)
    assert len(log_ids) == len(logfns)
    print "Will also remove %d logs" % len(log_ids)
    tmp = "\n".join(["Log ID: %d; %s" % xx for xx in zip(log_ids, logfns)])
    utils.print_info(tmp, 1)

    file_ids, fns = get_fileinfo(db, obs_ids)
    assert len(file_ids) == len(fns)
    print "Will also remove %d files" % len(file_ids)
    tmp = "\n".join(["File ID: %d; %s" % xx for xx in zip(file_ids, fns)])
    utils.print_info(tmp, 1)
    
    diag_ids, diagfns = get_diaginfo(db, file_ids)
    assert len(diag_ids) == len(diagfns)
    print "Will also remove %d diagnostics" % len(diag_ids)
    tmp = "\n".join(["Diagnostic ID: %d; %s" % xx for xx in zip(diag_ids, diagfns)])
    utils.print_info(tmp, 1)
    
    mysqldumpstr = dump_db_entries(db, dir_id, obs_ids, log_ids, file_ids, diag_ids)
    utils.print_info("MySQL dump:\n%s" % mysqldumpstr, 2)
    
    if not args.dryrun:
        try:
            # Make back-up directory
            os.makedirs(backupdir)
            # Write mysql dump
            with open(os.path.join(backupdir, "db_entries.sql"), 'w') as ff:
                ff.write(mysqldumpstr)
            # Move files
            for src in fns+logfns+diagfns:
                fn = os.path.basename(src)
                dest = os.path.join(backupdir, fn)
                if os.path.isfile(src):
                    # Make sure file exists (it may have already been deleted)
                    shutil.move(src, dest)
            # Remove entries from the database
            with db.transaction() as conn:
                # Remove diagnostic entries
                delete = db.diagnostics.delete().\
                            where(db.diagnostics.c.diagnostic_id.in_(diag_ids))
                results = conn.execute(delete)
                results.close()
                # Remove file entries 
                # (newest first because of foreign key constraints - parent_file_id column)
                for file_id in reversed(sorted(file_ids)):
                    delete = db.files.delete().\
                                where(db.files.c.file_id == file_id)
                    results = conn.execute(delete)
                    results.close()
                # logs
                delete = db.logs.delete().\
                            where(db.logs.c.log_id.in_(log_ids))
                results = conn.execute(delete)
                results.close()
                # obs
                delete = db.obs.delete().\
                            where(db.obs.c.obs_id.in_(obs_ids))
                results = conn.execute(delete)
                results.close()
                # directory
                delete = db.directories.delete().\
                            where(db.directories.c.dir_id == dir_id)
                results = conn.execute(delete)
                results.close()
        except:
            print "Error encountered! Will attempt to un-move files."
            # Try to unmove files
            for src in fns+logfns+diagfns:
                fn = os.path.basename(src)
                dest = os.path.join(backupdir, fn)
                if os.path.isfile(dest) and not os.path.isfile(src):
                    shutil.move(dest, src)
            if os.path.isdir(backupdir):
                try:
                    os.rmdir(backupdir)
                except:
                    print "Could not remove back-up dir %s" % backupdir
            raise
        else:
            print "Successfully scrubbed %s (ID: %d)" % (dir_toremove, dir_id)
Example #14
0
def get_files_by_type(psrnames, filetype, rcvr=None):
    """Get a list of data base rows containing
        file and obs information for the given pulsar,
        filetype and receiver.

        Inputs:
            psrnames: The names of the pulsar to match.
            filetype: The type of files to match.
            rcvr: The name of the receiver to match.
                (Default: Match all)

        Outputs:
            rows: A list of rows containing file and obs
                information for each matching file.
    """
    db = database.Database()

    # OR together an equality test for each preferred pulsar name
    psrname = utils.get_prefname(psrnames[0])
    tmp = (db.obs.c.sourcename == psrname)
    for psrname in psrnames[1:]:
        psrname = utils.get_prefname(psrname)
        tmp |= (db.obs.c.sourcename == psrname)

    # AND in the restrictions appropriate for the requested file type
    getwhere = FILETYPE_TO_WHERE[filetype]
    whereclause = tmp & getwhere()

    if rcvr is not None:
        whereclause &= (db.obs.c.rcvr == rcvr)

    with db.transaction() as conn:
        select = db.select([db.files,
                            db.obs.c.dir_id,
                            db.obs.c.sourcename,
                            db.obs.c.obstype,
                            db.obs.c.start_mjd,
                            db.obs.c.length,
                            db.obs.c.bw,
                            db.obs.c.freq,
                            db.obs.c.nsubints,
                            db.obs.c.nsubbands,
                            db.obs.c.obsband,
                            db.obs.c.rcvr],
                    from_obj=[db.files.\
                        outerjoin(db.obs,
                            onclause=(db.files.c.obs_id ==
                                        db.obs.c.obs_id))]).\
                    where(whereclause).\
                    order_by(db.files.c.added.asc())
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()

    # Only keep the most recently added file for each observation.
    # Rows are sorted oldest-first by the query above, so walk the
    # list backwards, keeping the first (newest) file seen per obs_id.
    # Use a set for O(1) membership tests (was an O(n) list scan per
    # row, i.e. O(n^2) overall).
    seen_obs_ids = set()
    for ii in reversed(range(len(rows))):
        if rows[ii]['obs_id'] in seen_obs_ids:
            rows.pop(ii)
        else:
            seen_obs_ids.add(rows[ii]['obs_id'])
    return rows
Example #15
0
def get_all_whereclause():
    """Return a whereclause that matches all files.

        Inputs:
            None

        Outputs:
            whereclause: An unconditional clause (True).
    """
    # No database handle is needed to build the unconditional clause;
    # the unused 'db = database.Database()' local was removed.
    return True
Example #16
0
def main():
    """Update the calibrator database for the source named by the
        module-level 'args.sourcename' and report the updated file.
    """
    db = database.Database()
    # force=True - presumably updates unconditionally; confirm in
    # calibrate.update_caldb
    caldbfn = calibrate.update_caldb(db, args.sourcename, force=True)
    print "Updated %s" % caldbfn
Example #17
0
def get_corrected_whereclause():
    """Build a whereclause matching files at the 'corrected' stage.

        Outputs:
            whereclause: The matching clause.
    """
    db = database.Database()
    return (db.files.c.stage == 'corrected')
Example #18
0
def get_cleaned_whereclause():
    """Build a whereclause matching cleaned files that passed QC.

        Outputs:
            whereclause: The matching clause.
    """
    db = database.Database()
    stage_is_cleaned = (db.files.c.stage == 'cleaned')
    passed_qc = (db.files.c.qcpassed == True)
    return stage_is_cleaned & passed_qc
Example #19
0
def __obslog_db_match(obsdt_utc, names):
    """Find entries in observing log database matching the given information.

        Inputs:
            obsdt_utc: The UTC datetime at the start of the observation
            names: Object names to match

        Outputs:
            logentries: Matching log entries.
    """
    db = database.Database('obslog')

    # Cast the stored timestamp to a DateTime so it can be compared
    # against datetime objects in the query below.
    utcstart_col = sa.cast(db.obsinfo.c.obstimestamp, sa.DateTime)

    # Find entries within +- 1 day of observation start time
    start = obsdt_utc - datetime.timedelta(days=1)
    end = obsdt_utc + datetime.timedelta(days=1)
    with db.transaction() as conn:
        # lst/3600.0: presumably seconds -> hours; confirm column units
        select = db.select([db.obsinfo.c.object.label('name'),
                            (db.obsinfo.c.lst/3600.0).label('lststart'),
                            utcstart_col.label('utcstart'),
                            db.obsinfo.c.azim.label('az'),
                            db.obsinfo.c.elev.label('alt'),
                            db.obsinfo.c.scan.label('scannum'),
                            db.obsinfo.c.lon,
                            db.obsinfo.c.lat]).\
                    where(db.obsinfo.c.object.in_(names) & (utcstart_col >= start) &
                          (utcstart_col <= end))
        result = conn.execute(select)
        rows = result.fetchall()
        result.close()

    utils.print_debug(
        "Found %d matching obslog DB entries "
        "(name: %s; UTC: %s)" %
        (len(rows), ", ".join(names), obsdt_utc.strftime("%c")), 'correct')
    logentries = []
    for row in rows:
        # refine matching based on time: accept entries whose start time
        # lies within +-25 s of the observation start
        utils.print_debug("%s" % row, 'correct')
        twentyfivesec = datetime.timedelta(seconds=25)
        # DB timestamps are naive; attach the UTC timezone before comparing
        logdt_utc = UTC_TZ.localize(row['utcstart'])
        if (logdt_utc - twentyfivesec) <= obsdt_utc <= (logdt_utc +
                                                        twentyfivesec):
            # Compute a few values to be consistent with obslog file parsing
            utc_hrs = row['utcstart'].hour + (
                row['utcstart'].minute +
                (row['utcstart'].second + row['utcstart'].microsecond * 1e-6) /
                60.0) / 60.0

            # Local (Berlin) calendar date of the observation
            logdt_local = logdt_utc.astimezone(BERLIN_TZ)
            localdate = logdt_local.date()

            entry = dict(row)
            entry['scannum'] = str(row['scannum'])
            entry['utcstart'] = utc_hrs
            entry['utc'] = row['utcstart'].strftime('%c')
            entry['localdate'] = localdate
            # Sexagesimal strings for the catalogue position (lon/lat in deg)
            entry['catalog_rastr'] = rs.utils.deg_to_hmsstr(row['lon'],
                                                            decpnts=3,
                                                            style='units')[0]
            entry['catalog_decstr'] = rs.utils.deg_to_dmsstr(row['lat'],
                                                             decpnts=3,
                                                             style='units')[0]

            logentries.append(entry)
    return logentries
Example #20
0
def get_loaded_whereclause():
    """Build a whereclause matching files whose status is 'done'.

        Outputs:
            whereclause: The matching clause.
    """
    db = database.Database()
    return (db.files.c.status == 'done')