Example #1
    def test_load_filename_gtt(self):
        files = [
            'test1.fits.fz', {
                dmdbdefs.DB_COL_FILENAME: 'test.fits',
                dmdbdefs.DB_COL_COMPRESSION: '.fz'
            }, {
                'filename': 'test2.fits',
                'compression': None
            }, {
                dmdbdefs.DB_COL_FILENAME: 'test4.fts.fz'
            }, {
                'filename': 'test3.fits.fz'
            }
        ]
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        tab = dbh.load_filename_gtt(files)
        curs = dbh.cursor()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], len(files))
        curs.execute("select " + (',').join(
            [dmdbdefs.DB_COL_FILENAME, dmdbdefs.DB_COL_COMPRESSION]) +
                     " from " + tab + " where " + dmdbdefs.DB_COL_FILENAME +
                     "='test4.fts'")
        self.assertEqual('.fz', curs.fetchall()[0][-1])

        dbh.commit()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], 0)

        self.assertRaises(ValueError, dbh.load_filename_gtt, [12345])
Example #2
    def test_get_job_file_mvmt_info(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_job_file_mvmt_info()
        self.assertTrue('descampuscluster' in data)
        self.assertTrue('desar2home' in data['descampuscluster'])
        self.assertTrue('no_archive' in data['descampuscluster']['desar2home'])
        self.assertTrue('mvmtclass' in
                        data['descampuscluster']['desar2home']['no_archive'])
Example #3
    def test_get_datafile_metadata(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_datafile_metadata('cat_finalcut')
        self.assertEqual(len(data), 2)
        self.assertEqual(data[0], 'SE_OBJECT')
        self.assertTrue('PRIMARY' in data[1].keys())

        self.assertRaises(ValueError, dbh.get_datafile_metadata,
                          'cat_something')
Example #4
    def test_load_id_gtt(self):
        ids = [1, 5, 10, 15, 20, 25]
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        tab = dbh.load_id_gtt(ids)
        curs = dbh.cursor()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], len(ids))
        dbh.commit()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], 0)

        ids = [1, 2, 3.5, 6]
        self.assertRaises(ValueError, dbh.load_id_gtt, ids)
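
As the assertions above show, the GTT (global temporary table) only lives until the next commit: it holds len(ids) rows right after load_id_gtt() and is empty again once dbh.commit() runs. A minimal usage sketch built from the calls demonstrated in this test; the services file path, the joined 'task' table, and the GTT id column name are illustrative assumptions, not taken from the source:

# Sketch only: load ids into the GTT, join against them, then let commit clear it.
dbh = dmdbi.DesDmDbi('services.ini', 'db-test')   # placeholder services file
tab = dbh.load_id_gtt([1, 5, 10])
curs = dbh.cursor()
# 'task' and the GTT column name 'id' are illustrative assumptions
curs.execute("select t.id, t.status from task t, %s g where t.id=g.id" % tab)
rows = curs.fetchall()
dbh.commit()   # the commit empties the GTT again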
Example #5
    def test_task_interaction(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        root_id = dbh.create_task('root_task',
                                  None,
                                  i_am_root=True,
                                  do_begin=True)
        parent_id = dbh.create_task('parent_task',
                                    None,
                                    parent_task_id=root_id,
                                    root_task_id=root_id,
                                    label='wrapper',
                                    do_begin=True)
        child1 = dbh.create_task('exec1',
                                 None,
                                 parent_id,
                                 root_id,
                                 label='child1')
        dbh.begin_task(child1, True)
        dbh.end_task(child1, 0, True)
        child2 = dbh.create_task('exec1',
                                 None,
                                 parent_id,
                                 root_id,
                                 label='child2',
                                 do_begin=False,
                                 do_commit=True)
        curs = dbh.cursor()
        curs.execute("select start_time from task where id=%i" % child2)
        res = curs.fetchall()
        self.assertEqual(len(res), 1)
        self.assertIsNone(res[0][0])
        dbh.end_task(child2, 1)
        curs.execute(
            "select name, end_time, status from task where root_task_id=%i" %
            root_id)
        res = curs.fetchall()
        self.assertEqual(len(res), 4)

        curs.execute("select status from task where id=%i" % child2)
        res = curs.fetchall()
        self.assertEqual(len(res), 1)
        curs.execute("select status from task where id=%i" % child1)
        res = curs.fetchall()
        self.assertEqual(len(res), 1)
        self.assertEqual(res[0][0], 0)
        dbh.rollback()
        curs.execute("select status from task where id=%i" % child2)
        res = curs.fetchall()
        self.assertEqual(len(res), 1)
        self.assertIsNone(res[0][0])
Example #6
    def test_load_artifact_gtt(self):
        files = [{
            dmdbdefs.DB_COL_FILENAME: 'test.fits',
            dmdbdefs.DB_COL_COMPRESSION: '.fz',
            dmdbdefs.DB_COL_FILESIZE: 128,
            dmdbdefs.DB_COL_MD5SUM: 'ab66249844ae'
        }, {
            'filename': 'test2.fits',
            'compression': None,
            dmdbdefs.DB_COL_FILESIZE.lower(): 112233,
            dmdbdefs.DB_COL_MD5SUM.lower(): 'ab66249844'
        }, {
            'fullname': 'test3.fits.fz'
        }, {
            dmdbdefs.DB_COL_FILENAME: 'test4.fts.fz'
        }, {
            'filename': 'test4.fits'
        }]
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        tab = dbh.load_artifact_gtt(files)
        curs = dbh.cursor()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], len(files))
        curs.execute("select " + (',').join([
            dmdbdefs.DB_COL_FILENAME, dmdbdefs.DB_COL_COMPRESSION,
            dmdbdefs.DB_COL_MD5SUM, dmdbdefs.DB_COL_FILESIZE
        ]) + " from " + tab + " where " + dmdbdefs.DB_COL_FILENAME +
                     "='test.fits'")
        res = curs.fetchall()
        self.assertEqual(res[0][-1], 128)
        dbh.commit()
        curs.execute('select count(*) from %s' % tab)
        self.assertEqual(curs.fetchall()[0][0], 0)

        files = [{
            dmdbdefs.DB_COL_FILENAME: 'test.fits',
            dmdbdefs.DB_COL_COMPRESSION: '.fz',
            dmdbdefs.DB_COL_FILESIZE: 128,
            dmdbdefs.DB_COL_MD5SUM: 'ab66249844ae'
        }, {
            'filenam': 'test2.fits',
            'compression': None,
            dmdbdefs.DB_COL_FILESIZE.lower(): 112233,
            dmdbdefs.DB_COL_MD5SUM.lower(): 'ab66249844'
        }, {
            'fullname': 'test3.fits.fz'
        }, {
            dmdbdefs.DB_COL_FILENAME: 'test4.fts.fz'
        }]
        self.assertRaises(ValueError, dbh.load_artifact_gtt, files)
Example #7
def main(argv):
    """ Program entry point """

    args = parse_cmdline(argv)
    dbh = desdmdbi.DesDmDbi(args['des_services'], args['des_db_section'])

    print("Creating task with name='dts' and label='%s'" % args['label'])
    task_id = dbh.create_task(name='dts', info_table=None, parent_task_id=None,
                              root_task_id=None, i_am_root=True, label=args['label'],
                              do_begin=True, do_commit=True)
    row = {'task_id': task_id, 'prov_msg': 'dts file receiver %s' % args['label']}
    dbh.basic_insert_row('FILE_REGISTRATION', row)
    dbh.commit()
    dbh.close()

    print("Update the DTS config file:   dts_task_id = %d" % task_id)
Example #8
    def reconnect(self):
        """ Method to reconnect to the database

            Parameters
            ----------
            None

            Returns
            -------
            None

        """
        if self.dbh is None:
            self.dbh = desdmdbi.DesDmDbi(threaded=self.threaded)
        else:
            self.dbh.reconnect()
        self.cursor = self.dbh.cursor()
        self.cursor.prepare(self.sql)
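
Because the prepared statement lives on a cursor tied to the old connection, reconnect() has to rebuild both the connection and the cursor and then call prepare() again. A hypothetical usage sketch (the worker object and bind values are assumptions; passing None to cursor.execute() re-runs the prepared statement, as in cx_Oracle):

binds = {'id': 42}                        # placeholder bind values for the prepared SQL
try:
    worker.cursor.execute(None, binds)    # run the already-prepared statement
    rows = worker.cursor.fetchall()
except Exception:
    worker.reconnect()                    # rebuild dbh and cursor, re-prepare the SQL
    worker.cursor.execute(None, binds)
    rows = worker.cursor.fetchall()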
Example #9
def get_filemgmt_class(args):
    """ Figure out which filemgmt class to use """
    filemgmt_class = None

    archive = args['archive_name']

    if args['classmgmt']:
        filemgmt_class = args['classmgmt']
    elif args['wclfile']:
        from intgutils.wcl import WCL
        config = WCL()
        with open(args['wclfile'], 'r') as configfh:
            config.read(configfh)
        if archive in config['archive']:
            filemgmt_class = config['archive'][archive]['filemgmt']
        else:
            miscutils.fwdie(f"Invalid archive name ({archive})", 1)
    else:
        import despydmdb.desdmdbi as desdmdbi
        with desdmdbi.DesDmDbi(args['des_services'], args['section']) as dbh:
            curs = dbh.cursor()
            sql = f"select filemgmt from ops_archive where name='{archive}'"
            curs.execute(sql)
            rows = curs.fetchall()
            if rows:
                filemgmt_class = rows[0][0]
            else:
                miscutils.fwdie(f"Invalid archive name ({archive})", 1)

    if filemgmt_class is None or '.' not in filemgmt_class:
        print(f"Error: Invalid filemgmt class name ({filemgmt_class})",
              flush=True)
        print("\tMake sure it contains at least 1 period.", flush=True)
        miscutils.fwdie("Invalid filemgmt class name", 1)

    return filemgmt_class
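
The final check only requires that the returned name contain a period because the value is treated downstream as a dotted 'module.ClassName' path. A hypothetical sketch of how such a name could be resolved into a class; this helper is not part of the source:

import importlib

def load_class_by_name(dotted_name):
    """ Resolve 'package.module.ClassName' into the class object (sketch) """
    modname, classname = dotted_name.rsplit('.', 1)   # needs at least one period
    module = importlib.import_module(modname)
    return getattr(module, classname)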
Example #10
    def __init__(self,
                 semname,
                 task_id,
                 desfile=None,
                 section=None,
                 connection=None,
                 threaded=False):
        """
        Create the DB connection and do the semaphore wait.
        """
        self.desfile = desfile
        self.section = section
        self.semname = semname
        self.task_id = task_id
        self.slot = None

        miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                          f"SEM - INFO - semname {self.semname}")
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                          "SEM - BEG - db-specific imports")
        import despydmdb.desdmdbi as desdmdbi
        import cx_Oracle
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                          "SEM - END - db-specific imports")

        miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - db connection")
        self.dbh = desdmdbi.DesDmDbi(desfile, section, threaded=threaded)
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - db connection")

        curs = self.dbh.cursor()

        sql = f"select count(*) from semlock where name={self.dbh.get_named_bind_string('name')}"
        curs.execute(sql, {'name': semname})
        num_slots = curs.fetchone()[0]
        if num_slots == 0:
            miscutils.fwdebug(0, "SEMAPHORE_DEBUG",
                              f"SEM - ERROR - no locks with name {semname}")
            raise ValueError(f'No locks with name {semname}')

        self.id = self.dbh.get_seq_next_value('seminfo_seq')
        self.dbh.basic_insert_row(
            'seminfo', {
                'id': self.id,
                'name': self.semname,
                'request_time': self.dbh.get_current_timestamp_str(),
                'task_id': task_id,
                'num_slots': num_slots
            })
        self.dbh.commit()

        self.slot = curs.var(cx_Oracle.NUMBER)
        done = False
        trycnt = 1
        while not done and trycnt <= MAXTRIES:
            try:
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - wait")
                curs.callproc("SEM_WAIT", [self.semname, self.slot])
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - wait")
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  f"SEM - INFO - slot {self.slot}")
                done = True
                if not self.dbh.is_oracle():
                    self.dbh.commit()  # test database must commit
            except Exception as e:
                miscutils.fwdebug(0, "SEMAPHORE_DEBUG",
                                  f"SEM - ERROR - {str(e)}")

                time.sleep(TRYINTERVAL)

                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  "SEM - BEG - remake db connection")
                self.dbh = desdmdbi.DesDmDbi(desfile, section)
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  "SEM - END - remake db connection")

                curs = self.dbh.cursor()
                self.slot = curs.var(cx_Oracle.NUMBER)

                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - dequeue")
                curs.callproc("SEM_DEQUEUE", [self.semname, self.slot])
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - dequeue")

                trycnt += 1

        if done:
            # need different connection to do the commit of the grant info as commit will release lock
            dbh2 = desdmdbi.DesDmDbi(desfile, section, connection)
            dbh2.basic_update_row(
                'SEMINFO', {
                    'grant_time': dbh2.get_current_timestamp_str(),
                    'num_requests': trycnt,
                    'slot': self.slot
                }, {'id': self.id})
            dbh2.commit()
Example #11
def run_compare(args):
    """ Method to determine what data need to be compared

        Parameters
        ----------
        args : list of command line arguments

        Returns
        -------
        the result from do_compare

    """
    # connect to the database
    if args.dbh is None:
        dbh = desdmdbi.DesDmDbi(args.des_services, args.section)
    else:
        dbh = args.dbh
    # do some quick validation
    if args.date_range and args.pfwid:
        print("Date_range was specified, thus pfwid cannot be.")
        sys.exit(1)
    if args.relpath and (args.reqnum or args.unitname or args.attnum
                         or args.tag or args.pfwid):
        print(
            "Relpath was specified, thus reqnum, unitname, attnum, tag, and pfwid cannot be specified."
        )
        sys.exit(1)
    if args.reqnum and (args.tag or args.pfwid):
        print("Reqnum was specified, thus tag and pfwid cannot be specified.")
        sys.exit(1)
    if args.tag and args.pfwid:
        print("Tag was specified, thus pfwid cannot be specified.")
        sys.exit(1)
    if (args.unitname or args.attnum) and not args.reqnum:
        print(
            "Unitname and/or attnum were specified, but reqnum was not, please supply a reqnum and run again."
        )
        sys.exit(1)

    # if dealing with a date range then get the relevant pfw_attempt_ids
    if args.date_range:
        dates = args.date_range.split(',')
        whereclause = []
        if len(dates) == 1:
            whereclause.append(
                f"submittime>=TO_DATE('{dates[0]} 00:00:01', 'YYYY-MM-DD HH24:MI:SS') and submittime<=TO_DATE('{dates[0]} 23:59:59', 'YYYY-MM-DD HH24:MI:SS')"
            )
        else:
            whereclause.append(
                f"submittime>=TO_DATE('{dates[0]} 00:00:01', 'YYYY-MM-DD HH24:MI:SS') and submittime<=TO_DATE('{dates[1]} 23:59:59', 'YYYY-MM-DD HH24:MI:SS')"
            )
        if args.pipeline:
            whereclause.append(f"subpipeprod='{args.pipeline}'")
        if args.reqnum:
            whereclause.append(f"reqnum={args.reqnum}")
            if args.unitname:
                whereclause.append(f"unitname='{args.unitname}'")
            if args.attnum:
                whereclause.append(f"attnum={args.attnum}")
        elif args.tag:
            whereclause.append(
                f"id in (select pfw_attempt_id from proctag where tag='{args.tag}')"
            )
        pfwids = dbutils.get_pfw_attempt_ids_where(dbh, whereclause, 'id')

        if not args.silent:
            print(
                f"Found {len(pfwids):d} pfw_attempt_id's for the given date range (and any qualifying tag/reqnum)"
            )
        if not pfwids:
            return 0
        return multi_compare(dbh, pfwids, args)

    pfwids = []
    # if dealing with a tag then get the relevant pfw_attempt_ids
    if args.tag:
        pfwids = dbutils.get_pfw_attempt_id_from_tag(dbh, args.tag)
    # if dealing with a triplet
    elif args.reqnum:
        pfwids = dbutils.get_pfw_attempt_ids_from_triplet(dbh, args)
        args.reqnum = None
        args.unitname = None
        args.attnum = None
    elif args.pfwid and ',' in args.pfwid:
        pfwids = args.pfwid.split(',')
    # if only a single comparison was requested (single pfw_attempt_id, triplet (reqnum, uniname, attnum), or path)
    if not pfwids:
        return do_compare(dbh, args)
    if len(pfwids) == 1:
        args.pfwid = pfwids[0]
        return do_compare(dbh, args)
    pfwids.sort()  # put them in order
    return multi_compare(dbh, pfwids, args)
Example #12
def main():
    """ Main control """

    args = parse_and_check_cmd_line(sys.argv[1:])
    dbh = desdbi.DesDmDbi(args.des_services, args.section)
    # get all pfw_attempt_ids for the given tag
    if args.tag:
        if not args.filetype:
            print(
                'WARNING, specifying a tag without a filetype will delete all data from the tag.'
            )
            should_continue = input(
                "Please verify you want to do this [yes/no]: ")
            shdelchar = should_continue[0].lower()
            if shdelchar in ['y', 'yes']:
                pass
            else:
                sys.exit(0)
        pfw_ids = dbutils.get_pfw_attempt_id_from_tag(dbh, args.tag)
    elif args.pfwid and ',' in args.pfwid:
        pfw_ids = args.pfwid.split(',')
    elif args.reqnum:
        pfw_ids = dbutils.get_pfw_attempt_ids_from_triplet(dbh, args)
        args.reqnum = None
        args.unitname = None
        args.attnum = None
    else:
        pfw_ids = [args.pfwid]
    pfw_ids.sort()  # put them in order
    all_data = {}
    merged_comparison_info = {}
    # go through each pfw_attempt_id and gather the needed data
    for pid in pfw_ids:
        args.pfwid = pid
        archive_root, archive_path, relpath, state, operator, pfwid, part = gather_data(
            dbh, args)
        if not archive_root and not relpath:
            print(f"    Skipping pfw_attempt_id {pfwid}.")
            continue
        files_from_disk, dup = diskutils.get_files_from_disk(
            relpath, archive_root)
        files_from_db, dup = dbutils.get_files_from_db(dbh, relpath,
                                                       args.archive, pfwid,
                                                       args.filetype)
        # if filetype is set then trim down the disk results
        if args.filetype is not None:
            newfiles = {}
            for filename, val in files_from_db.items():
                if filename in files_from_disk:
                    newfiles[filename] = files_from_disk[filename]
            files_from_disk = newfiles

        comparison_info = diskutils.compare_db_disk(files_from_db,
                                                    files_from_disk,
                                                    dup,
                                                    False,
                                                    archive_root=archive_root,
                                                    pfwid=pid)
        merged_comparison_info[pfwid] = comparison_info
        # add it to the master dictionary
        all_data[pfwid] = fmutils.DataObject(
            **{
                'archive_root': archive_root,
                'archive_path': archive_path,
                'relpath': relpath,
                'state': state,
                'operator': operator,
                'pfwid': pfwid,
                'dofiles': args.filetype is not None or part,
                'files_from_disk': files_from_disk,
                'dup': dup,
                'files_from_db': files_from_db,
                'comparison_info': comparison_info
            })

    if not all_data:
        print("Nothing to do")
        sys.exit(0)
    filesize = 0.0
    bad_filesize = 0.0
    bad_pfwids = []
    ffdb = 0
    ffd = 0
    bad_ffdb = 0
    bad_ffd = 0
    # gather the stats for reporting
    empty_pfwids = []
    for data in all_data.values():
        # if the data is not junk and no filetype was specified then it cannot be deleted
        if data.state != 'JUNK' and args.filetype is None:
            for filename, val in data.files_from_disk.items():
                # print(filename, val)
                bad_filesize += val['filesize']
            bad_pfwids.append(str(data.pfwid))
            bad_ffdb += len(data.files_from_db)
            bad_ffd += len(data.files_from_disk)
        else:
            for filename, val in data.files_from_disk.items():
                # print(filename, val)
                filesize += val['filesize']
            ffdb += len(data.files_from_db)
            ffd += len(data.files_from_disk)
            if not data.files_from_db and not data.files_from_disk:
                empty_pfwids.append(data.pfwid)
    for pid in empty_pfwids:
        del all_data[pid]
        del merged_comparison_info[pid]

    filesize, fend = get_size_unit(filesize)

    bad_filesize, bfend = get_size_unit(bad_filesize)

    # report the results of what was found
    if not files_from_db:
        print("\nNo files in database to delete.")
        sys.exit(0)
    if not files_from_disk:
        print("\nNo files on disk to delete.")
        sys.exit(0)

    if bad_pfwids:
        print(
            "\nThe following data cannot be deleted as the associated attempts have not been marked as 'JUNK' (ATTEMPT_STATE.DATA_STATE):"
        )
        if len(bad_pfwids) == 1:
            pid = list(all_data.keys())[0]
            operator = all_data[pid].operator
            archive_path = all_data[pid].archive_path
        else:
            operator = None
            archive_path = None
        report(operator, archive_path, args.archive, bad_ffdb, bad_ffd,
               bad_filesize, bfend, bad_pfwids)
        if len(bad_pfwids) == len(all_data):
            print(" No data to delete\n")
            sys.exit(1)
    for bpid in bad_pfwids:
        del all_data[int(bpid)]
        del merged_comparison_info[int(bpid)]

    if len(all_data) == 1:
        pid = list(all_data.keys())[0]
        operator = all_data[pid].operator
        archive_path = all_data[pid].archive_path
    else:
        operator = None
        archive_path = None
    if bad_pfwids:
        print('\nFiles that can be deleted')

    report(operator, archive_path, args.archive, ffdb, ffd, filesize, fend)

    if args.dryrun:
        sys.exit(0)

    shdelchar = 'x'
    while shdelchar not in ['n', 'y']:
        print("")
        # query if we should proceed
        should_delete = input(
            "Do you wish to continue with deletion [yes/no/diff/print]?  ")
        shdelchar = should_delete[0].lower()

        if shdelchar in ['p', 'print']:
            print_files(merged_comparison_info)

        elif shdelchar in ['d', 'diff']:
            diff_files(merged_comparison_info)

        elif shdelchar in ['y', 'yes']:
            # loop over each pfwid
            for data in all_data.values():
                # if deleting specific files
                if data.dofiles:
                    good = diskutils.del_part_files_from_disk(
                        data.files_from_db, data.archive_root)
                    if len(good) != len(data.files_from_db):
                        print(
                            "Warning, not all files on disk could be deleted. Only removing the deleted ones from the database."
                        )
                    dbutils.del_part_files_from_db(dbh, good)
                    # check to see if this is the last of the files in the attempt
                    if dbutils.get_file_count_by_pfwid(dbh, data.pfwid) != 0:
                        depth = 'PRUNED'  # there are still some files on disk for this pfw_attempt_id
                    else:
                        depth = 'PURGED'  # these were the last files for the pfw_attempt_id
                else:
                    try:
                        diskutils.del_files_from_disk(data.archive_path)
                    except Exception as exept:
                        print("Error encountered when deleting files: ",
                              str(exept))
                        print("Aborting")
                        raise
                    errfls = {}
                    for (dirpath, _, filenames) in os.walk(
                            os.path.join(data.archive_root, data.relpath)):
                        for filename in filenames:
                            errfls[filename] = dirpath
                    if errfls:
                        delfiles = []
                        depth = 'PRUNED'
                        for filename, val in data.files_from_disk.items():
                            if filename not in errfls:
                                delfiles.append((filename))
                        dbutils.del_part_files_from_db_by_name(
                            dbh, data.relpath, args.archive, delfiles)
                    else:  # has to be purged as only an entire attempt can be deleted this way
                        depth = 'PURGED'
                        dbutils.del_files_from_db(dbh, data.relpath,
                                                  args.archive)
                dbutils.update_attempt_state(dbh, depth, data.pfwid)
        elif shdelchar in ['n', 'no']:
            print("Exiting.")
        else:
            print(f"Unknown input ({shdelchar}).   Ignoring")
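
get_size_unit() is used above to format the report but is not included in this example. A hypothetical sketch of such a helper, reducing a raw byte count to a value plus unit suffix (the unit labels are assumptions):

def get_size_unit(num_bytes):
    """ Reduce a byte count to a human-readable value and unit suffix (sketch) """
    for unit in ['b', 'kb', 'Mb', 'Gb', 'Tb']:
        if num_bytes < 1024.0:
            return num_bytes, unit
        num_bytes /= 1024.0
    return num_bytes, 'Pb'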
Example #13
    def test_init(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
Example #14
def main(args):
    parser = argparse.ArgumentParser(description='delete mass DB records for testing purposes')
    parser.add_argument('--inputs', action='store_true', default=False)
    #parser.add_argument('--filetypes', action='store')

    args = vars(parser.parse_args())

    dbh = desdmdbi.DesDmDbi()

    if dbh.configdict['name'].lower() != 'destest':
        print("This command can only be run against the destest database.")
        print("   User = '%s'" % dbh.configdict['user'])
        print("   Database Name = '%s'" % dbh.configdict['name'])
        print("   Section = %s" % dbh.which_services_section())
        print("   File = %s" % dbh.which_services_file())
        exit(1)

    # delete provenance
    # OPM_ARTIFACT auto deleted when deleting from genfile
    if 'USER' in os.environ:
        for tname in ['OPM_WAS_GENERATED_BY','OPM_WAS_DERIVED_FROM','OPM_USED']:
            delete_from_table(dbh, tname, os.environ['USER'])
    else:
        print("Skipping OPM tables because couldn't determine user")

    # non-file tables
    for tname in [ 'qc_processed_value', 'qc_processed_message', 'pfw_message',
        'pfw_data_query', 'pfw_attempt_label', 'pfw_attempt_val',
        'pfw_exec', 'pfw_wrapper', 'pfw_job', 'pfw_block',
        'pfw_attempt', 'pfw_unit', 'pfw_request',
        'seminfo', 'transfer_file', 'transfer_batch',
        'task']:
        delete_from_table(dbh, tname)

    empty_se_objects_table(dbh)

    for tname in ['catalog', 'image', 'scamp_qa', 'psf_qa']:
        delete_from_table(dbh, tname)


    for ftype in ['cal_biascor', 'cal_dflatcor', 'xtalked_bias', 'xtalked_dflat']:
        delete_from_table_by_ftype(dbh, 'CALIBRATION', ftype)

    # delete output files
    for ftype in ['cat_psfex','cat_satstars','cat_scamp','cat_scamp_full','cat_trailbox','head_scamp','head_scamp_full','psfex_model','qa_scamp','red_bkg','red_check','xml_psfex','xml_scamp','wcl','log','list','junk_tar']:
        delete_from_genfile_table(dbh, ftype)

    if args['inputs']:
        for tname in ['CALIBRATION', 'EXPOSURE']:
            delete_from_table(dbh, tname)
        for ftype in ['config', 'cal_lintable', 'cal_xtalk']:
            delete_from_genfile_table(dbh, ftype)


    # delete entries from file "location" table for files no longer having metadata
    #delete_from_cache_table(dbh)
    delete_from_file_archive_table(dbh)


    print("Are you sure you want to delete all these rows (Y/N)? ", end='')
    ans = sys.stdin.read(1)
    if ans.lower() == 'y':
        print("Committing the deletions")
        dbh.commit()
    else:
        print("Rolling back database.  No rows are deleted")
        dbh.rollback()
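
The delete_from_table(), delete_from_table_by_ftype(), delete_from_genfile_table() and related helpers are referenced above but not shown in this example. A hypothetical sketch of the simplest one; the optional user column name is a placeholder assumption, not from the source:

def delete_from_table(dbh, tablename, user=None):
    """ Delete all rows from a table, optionally only those created by one user (sketch) """
    curs = dbh.cursor()
    if user is None:
        curs.execute("delete from %s" % tablename)
    else:
        # 'user_created_by' is a placeholder column name
        curs.execute("delete from %s where user_created_by=%s" %
                     (tablename, dbh.get_named_bind_string('usr')),
                     {'usr': user})
    print("Deleted %d rows from %s" % (curs.rowcount, tablename))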
Example #15
    def test_get_metadata(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_metadata()
        self.assertTrue('ccdnum' in data)
        self.assertIsNone(data['ccdnum']['ccdnum']['data_type'])
Example #16
    def test_empty_gtt(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        self.assertRaises(ValueError, dbh.empty_gtt, 'gt_tab')
Example #17
    def test_get_all_filetype_metadata(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_all_filetype_metadata()
        self.assertTrue('cat_finalcut' in data)
        self.assertTrue('hdus' in data['cat_finalcut'])
        self.assertTrue('primary' in data['cat_finalcut']['hdus'])
Example #18
    def test_get_site_info(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_site_info()
        self.assertTrue('descampuscluster' in data)
        self.assertTrue('gridtype' in data['descampuscluster'])
Example #19
    m = re.search(r"([^_]+)_r([^p]+)p([^_]+)", run)
    if m is None:
        print("Error:  cannot parse run", run)
        sys.exit(1)

    unitname = m.group(1)
    reqnum = m.group(2)
    attnum = m.group(3)

    print("unitname =", unitname)
    print("reqnum =", reqnum)
    print("attnum =", attnum)
    print("\n")

    dbh = desdmdbi.DesDmDbi()

    try:
        delete_db_run(dbh, unitname, reqnum, attnum, 1)
    except:
        print(
            "Caught exception.  Explicitly rolling back database.  No rows are deleted"
        )
        dbh.rollback()
        raise

    print("Are you sure you want to delete all these rows (Y/N)? ")
    ans = sys.stdin.read(1)
    if ans.lower() == 'y':
        print("Committing the deletions")
        dbh.commit()
Example #20
    def test_get_archive_transfer_info(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_archive_transfer_info()
        self.assertEqual(len(data), 2)
Example #21
    def test_archive_info(self):
        dbh = dmdbi.DesDmDbi(self.sfile, 'db-test')
        data = dbh.get_archive_info()
        self.assertTrue('decarchive' in data)
        self.assertTrue('fileutils' in data['decarchive'])